Mirror of https://github.com/strongswan/strongswan.git, synced 2025-08-20 00:01:59 -04:00

Compare commits (557 commits)

.cirrus.yml (25 lines changed)

@@ -1,11 +1,11 @@
-task:
+freebsd_task:
   matrix:
-    - name: FreeBSD 14.0
+    - name: FreeBSD 14.2
       freebsd_instance:
-        image_family: freebsd-14-0
-    - name: FreeBSD 13.2
+        image_family: freebsd-14-2
+    - name: FreeBSD 13.4
       freebsd_instance:
-        image_family: freebsd-13-2
+        image_family: freebsd-13-4

   env:
     TESTS_REDUCED_KEYLENGTHS: yes
@@ -16,3 +16,18 @@ task:

   install_script: ./scripts/test.sh deps
   script: ./scripts/test.sh
+
+alpine_task:
+  container:
+    image: alpine:latest
+
+  env:
+    TESTS_REDUCED_KEYLENGTHS: yes
+    TESTS_NO_IPV6: yes
+    LEAK_DETECTIVE: no
+    MONOLITHIC: no
+    TEST: alpine
+    OS_NAME: alpine
+
+  install_script: ./scripts/test.sh deps
+  script: ./scripts/test.sh

@@ -1,3 +1,3 @@
 ignore:
-  - "*/suites/*"
-  - "*/tests/*"
+  - "**/suites/"
+  - "**/tests/"

.github/ISSUE_TEMPLATE/bug_report.md (1 line changed, vendored)

@@ -2,6 +2,7 @@
 name: "🐛 Bug report"
 about: Report a reproducible bug or regression
 labels: bug, new
+type: Bug
 ---

 <!--

.github/ISSUE_TEMPLATE/feature_request.md (1 line changed, vendored)

@@ -2,6 +2,7 @@
 name: Feature request
 about: Suggest an idea for this project
 labels: enhancement, new
+type: Feature
 ---

 <!--

.github/actions/default/action.yml (3 lines changed, vendored)

@@ -5,9 +5,6 @@ runs:
     - name: "Install Dependencies"
       run: ./scripts/test.sh deps
       shell: bash
-    - name: "Install Python Dependencies"
-      run: ./scripts/test.sh pydeps
-      shell: bash
     - name: "Build Dependencies"
       run: ./scripts/test.sh build-deps
       shell: bash

.github/codeql/cpp-queries/chunk_from_chars.ql (20 lines changed, vendored)

@@ -10,8 +10,7 @@
  * @precision very-high
  */
 import cpp
-import DataFlow::PathGraph
-import semmle.code.cpp.dataflow.DataFlow
+import semmle.code.cpp.dataflow.new.DataFlow

 class ChunkFromChars extends Expr {
   ChunkFromChars() {
@@ -23,29 +22,30 @@
   }
 }

-class ChunkFromCharsUsage extends DataFlow::Configuration {
-  ChunkFromCharsUsage() { this = "ChunkFromCharsUsage" }
-
-  override predicate isSource(DataFlow::Node source) {
+module ChunkFromCharsConfig implements DataFlow::ConfigSig {
+  predicate isSource(DataFlow::Node source) {
     source.asExpr() instanceof ChunkFromChars
   }

-  override predicate isSink(DataFlow::Node sink) {
+  predicate isSink(DataFlow::Node sink) {
     exists(sink.asExpr())
   }

-  override predicate isBarrierOut(DataFlow::Node node) {
+  predicate isBarrierOut(DataFlow::Node node) {
     /* don't track beyond function calls */
     exists(FunctionCall fc | node.asExpr().getParent*() = fc)
   }
 }

+module ChunkFromCharsFlow = DataFlow::Global<ChunkFromCharsConfig>;
+import ChunkFromCharsFlow::PathGraph
+
 BlockStmt enclosingBlock(BlockStmt b) {
   result = b.getEnclosingBlock()
 }

-from ChunkFromCharsUsage usage, DataFlow::PathNode source, DataFlow::PathNode sink
+from ChunkFromCharsFlow::PathNode source, ChunkFromCharsFlow::PathNode sink
 where
-  usage.hasFlowPath(source, sink)
+  ChunkFromCharsFlow::flowPath(source, sink)
   and not source.getNode().asExpr().getEnclosingBlock() = enclosingBlock*(sink.getNode().asExpr().getEnclosingBlock())
 select source, source, sink, "Invalid use of chunk_from_chars() result in sibling/parent block."

.github/workflows/android.yml (4 lines changed, vendored)

@@ -18,7 +18,7 @@ jobs:
      - id: skip-check
        uses: fkirc/skip-duplicate-actions@master
        with:
-          concurrent_skipping: 'same_content'
+          concurrent_skipping: 'same_content_newer'

  android:
    needs: pre-check
@@ -36,7 +36,7 @@ jobs:
        run: |
          NDK_VERSION=$(grep "ndkVersion" src/frontends/android/app/build.gradle | sed -e 's/.*"\(.*\)"/\1/')
          echo Using NDK ${NDK_VERSION}
-          yes | sudo ${ANDROID_HOME}/cmdline-tools/latest/bin/sdkmanager --install "ndk;${NDK_VERSION}"
+          yes | ${ANDROID_HOME}/cmdline-tools/latest/bin/sdkmanager --install "ndk;${NDK_VERSION}"
          echo "ANDROID_NDK_ROOT=${ANDROID_HOME}/ndk/${NDK_VERSION}" >> "$GITHUB_OUTPUT"
      - uses: actions/cache@v4
        with:

.github/workflows/codeql.yml (2 lines changed, vendored)

@@ -19,7 +19,7 @@ jobs:
      - id: skip-check
        uses: fkirc/skip-duplicate-actions@master
        with:
-          concurrent_skipping: 'same_content'
+          concurrent_skipping: 'same_content_newer'

  analyze:
    needs: pre-check

.github/workflows/linux.yml (44 lines changed, vendored)

@@ -21,12 +21,12 @@ jobs:
      - id: skip-check
        uses: fkirc/skip-duplicate-actions@master
        with:
-          concurrent_skipping: 'same_content'
+          concurrent_skipping: 'same_content_newer'

  latest:
    needs: pre-check
    if: ${{ needs.pre-check.outputs.should_skip != 'true' }}
-    runs-on: ubuntu-latest
+    runs-on: ${{ matrix.os || 'ubuntu-latest' }}
    strategy:
      matrix:
        test: [ all, default, printf-builtin ]
@@ -48,6 +48,9 @@ jobs:
          - test: no-dbg
          - test: no-dbg
            compiler: clang
+          - test: no-testable-ke
+          - test: no-testable-ke
+            compiler: clang
          - test: fuzzing
            compiler: clang
            monolithic: yes
@@ -76,7 +79,13 @@ jobs:
      - uses: ./.github/actions/default
      - run: ccache -s
      - if: ${{ success() && matrix.test == 'coverage' }}
-        run: bash <(curl -s https://codecov.io/bash)
+        uses: codecov/codecov-action@v4
+        with:
+          disable_search: true
+          fail_ci_if_error: true
+          file: coverage/coverage.cleaned.info
+          token: ${{ secrets.CODECOV_TOKEN }}
+          verbose: true
      - if: ${{ failure() }}
        uses: actions/upload-artifact@v4
        with:
@@ -84,24 +93,24 @@ jobs:
          path: config.log
          retention-days: 5

-  crypto-plugins:
+  crypto:
    needs: pre-check
    if: ${{ needs.pre-check.outputs.should_skip != 'true' }}
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
-        os: [ ubuntu-latest, ubuntu-20.04 ]
+        os: [ ubuntu-latest, ubuntu-22.04 ]
        test: [ botan, wolfssl, openssl, openssl-3, openssl-awslc, gcrypt ]
        leak-detective: [ no, yes ]
        exclude:
-          # test custom-built libs only on one platform
-          - os: ubuntu-20.04
+          # test custom-built libs only on the latest platform
+          - os: ubuntu-22.04
            test: botan
-          - os: ubuntu-20.04
+          - os: ubuntu-22.04
            test: wolfssl
-          - os: ubuntu-20.04
+          - os: ubuntu-22.04
            test: openssl-3
-          - os: ubuntu-20.04
+          - os: ubuntu-22.04
            test: openssl-awslc
    env:
      LEAK_DETECTIVE: ${{ matrix.leak-detective || 'no' }}
@@ -126,6 +135,12 @@ jobs:
          echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV
          ccache -z
      - uses: ./.github/actions/default
+      - name: Test Vectors (detailed)
+        env:
+          TESTS_SUITES: vectors
+          TESTS_VERBOSITY: 1
+        run:
+          ./scripts/test.sh
      - run: ccache -s
      - if: ${{ failure() }}
        uses: actions/upload-artifact@v4
@@ -140,7 +155,7 @@ jobs:
    runs-on: ${{ matrix.os }}
    strategy:
      matrix:
-        os: [ ubuntu-20.04 ]
+        os: [ ubuntu-22.04 ]
        test: [ all, nm ]
        compiler: [ gcc, clang ]
        exclude:
@@ -150,13 +165,14 @@ jobs:
      LEAK_DETECTIVE: ${{ matrix.leak-detective || 'no' }}
      CC: ${{ matrix.compiler || 'gcc' }}
      TEST: ${{ matrix.test }}
-      # LSan causes spurious SIGSEGV after tests due to DTLS handling by glibc
-      ASAN_OPTIONS: intercept_tls_get_addr=0
    steps:
      - uses: actions/checkout@v4
      - uses: actions/cache@v4
        with:
-          path: ~/.ccache
+          # path is different on newer systems
+          path: |
+            ~/.cache/ccache
+            ~/.ccache
          key: ccache-${{ matrix.os }}-${{ env.CC }}-${{ matrix.test }}-${{ github.sha }}
          restore-keys: |
            ccache-${{ matrix.os }}-${{ env.CC }}-${{ matrix.test }}-

.github/workflows/macos.yml (2 lines changed, vendored)

@@ -18,7 +18,7 @@ jobs:
      - id: skip-check
        uses: fkirc/skip-duplicate-actions@master
        with:
-          concurrent_skipping: 'same_content'
+          concurrent_skipping: 'same_content_newer'

  macos:
    strategy:

.github/workflows/sonarcloud.yml (32 lines changed, vendored)

@@ -17,7 +17,7 @@ jobs:
      - id: skip-check
        uses: fkirc/skip-duplicate-actions@master
        with:
-          concurrent_skipping: 'same_content'
+          concurrent_skipping: 'same_content_newer'

  sonarcloud:
    needs: pre-check
@@ -33,7 +33,6 @@ jobs:
        with:
          path: |
            ~/.cache/ccache
-            ~/.sonar-cache
          key: ccache-sonarcloud-${{ github.sha }}
          restore-keys: |
            ccache-sonarcloud-
@@ -41,24 +40,17 @@ jobs:
          sudo apt-get install -qq ccache
          echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV
          ccache -z
-      # using SonarSource/sonarcloud-github-action is currently not recommended
-      # for C builds, so we follow the "any CI" instructions
-      - name: Install sonar-scanner
+      - uses: SonarSource/sonarqube-scan-action/install-build-wrapper@v4
+      - run: |
+          echo "BUILD_WRAPPER_OUT_DIR=$HOME/bw-output" >> $GITHUB_ENV
+      - uses: ./.github/actions/default
+      - uses: SonarSource/sonarqube-scan-action@v4
        env:
-          SONAR_SCANNER_VERSION: 5.0.1.3006
-        run: |
-          export SONAR_SCANNER_HOME=$HOME/.sonar/sonar-scanner-$SONAR_SCANNER_VERSION-linux
-          curl --create-dirs -sSLo $HOME/.sonar/sonar-scanner.zip https://binaries.sonarsource.com/Distribution/sonar-scanner-cli/sonar-scanner-cli-$SONAR_SCANNER_VERSION-linux.zip
-          unzip -o $HOME/.sonar/sonar-scanner.zip -d $HOME/.sonar/
-          echo "SONAR_SCANNER_OPTS=-server" >> $GITHUB_ENV
-          curl --create-dirs -sSLo $HOME/.sonar/build-wrapper-linux-x86.zip https://sonarcloud.io/static/cpp/build-wrapper-linux-x86.zip
-          unzip -o $HOME/.sonar/build-wrapper-linux-x86.zip -d $HOME/.sonar/
-          echo "PATH=$HOME/.sonar/build-wrapper-linux-x86:$SONAR_SCANNER_HOME/bin:$PATH" >> $GITHUB_ENV
-      - env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-          BUILD_NUMBER: ${{ github.run_id }}
          SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
-          SONAR_PROJECT: ${{ secrets.SONAR_PROJECT }}
-          SONAR_ORGANIZATION: ${{ secrets.SONAR_ORGANIZATION }}
-        uses: ./.github/actions/default
+        with:
+          args: >
+            -Dsonar.projectKey=${{ secrets.SONAR_PROJECT }}
+            -Dsonar.organization=${{ secrets.SONAR_ORGANIZATION }}
+            -Dsonar.cfamily.threads=2
+            -Dsonar.cfamily.compile-commands=${{ env.BUILD_WRAPPER_OUT_DIR }}/compile_commands.json
      - run: ccache -s

.github/workflows/tkm.yml (2 lines changed, vendored)

@@ -18,7 +18,7 @@ jobs:
      - id: skip-check
        uses: fkirc/skip-duplicate-actions@master
        with:
-          concurrent_skipping: 'same_content'
+          concurrent_skipping: 'same_content_newer'

  tkm:
    needs: pre-check

.github/workflows/windows.yml (2 lines changed, vendored)

@@ -21,7 +21,7 @@ jobs:
      - id: skip-check
        uses: fkirc/skip-duplicate-actions@master
        with:
-          concurrent_skipping: 'same_content'
+          concurrent_skipping: 'same_content_newer'

  cross-compile:
    needs: pre-check

.gitignore (2 lines changed, vendored)

@@ -27,6 +27,7 @@ libtool
 y.tab.[ch]
 lex.yy.c
 *keywords.c
+!proposal_keywords.c
 plugin_constructors.c
 Doxyfile
 apidoc/
@@ -54,3 +55,4 @@ coverage/
 /*.includes
 test-driver
 nbproject/
+*.[si]

@@ -1,3 +1,3 @@
-Please refer to the [developer documentation](https://docs.strongswan.org/docs/5.9/devs/devs.html)
+Please refer to the [developer documentation](https://docs.strongswan.org/docs/latest/devs/devs.html)
 in our documentation for details regarding **code style** and
-[**contribution requirements**](https://docs.strongswan.org/docs/5.9/devs/contributions.html).
+[**contribution requirements**](https://docs.strongswan.org/docs/latest/devs/contributions.html).

Doxyfile.in (1208 lines changed)
File diff suppressed because it is too large.

HACKING (1 line changed)

@@ -14,7 +14,6 @@ the code, you need the following tools:
- autoconf
- libtool
- pkg-config
- gettext
- perl
- python
- lex/flex

INSTALL (2 lines changed)

@@ -144,4 +144,4 @@ Contents

 For a more up-to-date list of recommended modules refer to:

-  * https://docs.strongswan.org/docs/5.9/install/kernelModules.html
+  * https://docs.strongswan.org/docs/latest/install/kernelModules.html

@@ -65,10 +65,11 @@ cov-reset: cov-reset-common
 cov-report:
	@mkdir $(top_builddir)/coverage
	lcov -c -o $(top_builddir)/coverage/coverage.info -d $(top_builddir) \
-		--rc lcov_branch_coverage=1
+		--rc branch_coverage=1
	lcov -r $(top_builddir)/coverage/coverage.info '*/tests/*' '*/suites/*' '/usr*' \
		'*proposal_keywords_static.*' \
		-o $(abs_top_builddir)/coverage/coverage.cleaned.info \
-		--rc lcov_branch_coverage=1
+		--rc branch_coverage=1
	genhtml --num-spaces 4 --legend --branch-coverage --ignore-errors source \
		-t "$(PACKAGE_STRING)" \
		-o $(top_builddir)/coverage/html \

NEWS (138 lines changed)

@@ -1,13 +1,143 @@
+strongswan-6.0.2
+----------------
+
+- Support for per-CPU SAs (RFC 9611) has been added (Linux 6.13+).
+
+- Basic support for AGGFRAG mode (RFC 9347) has been added (Linux 6.14+).
+
+- POSIX regular expressions can be used to match remote identities.
+
+- Switching configs based on EAP-Identities is supported. Setting
+  `remote.eap_id` now always initiates an EAP-Identity exchange.
+
+- On Linux, sequence numbers from acquires are used when installing SAs. This
+  allows handling narrowing properly.
+
+- During rekeying, the narrowed traffic selectors are now proposed instead of
+  the configured ones.
+
+- The default AH/ESP proposals contain all supported key exchange methods plus
+  `none` to make PFS optional and accept proposals of older peers.
+
+- GRO for ESP is enabled for NAT-T UDP sockets, which can improve performance
+  if the esp4|6_offload modules are loaded.
+
+- charon-nm sets the VPN connection as persistent, preventing NetworkManager
+  from tearing down the connection if the network connectivity changes.
+
+- ML-KEM is supported via OpenSSL 3.5+.
+
+- The wolfssl plugin is now compatible with wolfSSL's FIPS module.
+
+- The libsoup plugin has been migrated to libsoup 3, libsoup 2 is not supported
+  anymore.
+
+- The long defunct uci plugin has been removed.
+
+- Log messages by watcher_t are now logged in a separate log group (`wch`).
+
+
+strongswan-6.0.1
+----------------
+
+- The ha plugin supports IKE and Child SAs with multiple key exchanges.
+  Incomplete IKE_SAs are now destroyed during a failover.
+
+- The new `interface_receive` option for the dhcp plugin allows binding the
+  receive socket to a different interface than the send socket. Also fixed a
+  regression if the DHCP server is running on the same host.
+
+- The new `source` option for the eap-radius plugin allows sending RADIUS
+  messages from a specific IP address.
+
+- Self-signed root CAs without policies are now excluded from policy validation.
+
+- Inbound traffic on IPsec SAs is now ignored when sending DPDs unless
+  UDP-encapsulation is used.
+
+- Send IKE_SA_INIT from NAT-T socket if not connecting to port 500.
+
+- Local traffic selectors can be configured for charon-nm. Its default
+  retransmission settings have been set to those of the Android app.
+
+- The vici Python wheel is now built via `build` frontend instead of calling
+  setup.py directly if --enable-python-wheels is used (the option to build eggs
+  has been removed). There is no option to automatically install the wheel (use
+  pip instead) and the --enable-python-eggs-install option has been removed.
+
+
+strongswan-6.0.0
+----------------
+
+- Support of multiple post-quantum (and classic) key exchanges using the
+  IKE_INTERMEDIATE exchange (RFC 9242) and the Additional Key Exchange
+  transform types 1..7 (RFC 9370).
+
+- ML-KEM is provided by the botan, wolfssl, openssl (only via AWS-LC) and the
+  new ml plugins.
+
+- Handling of CHILD_SA rekey collisions has been improved, which makes CHILD_SAs
+  properly trackable via the child_rekey() hook.
+
+- The behavior when reloading or unloading connections that include `start` in
+  their `start_action` has been improved.
+
+- The default identity is now the subject DN instead of the IP address if a
+  certificate is available.
+
+- The file logger supports logging as JSON objects and can add timestamps
+  in microseconds.
+
+- The cert-enroll script now supports three generations of CA certificates.
+
+- charon-nm uses a different routing table than the regular IKE daemon to avoid
+  conflicts if both are running.
+
+- AF_VSOCK sockets are supported on Linux to communicate with a daemon that runs
+  in a VM.
+
+- TUN devices can properly handle IPv6 addresses.
+
+- For compatibility with older SCEP implementations, challenge passwords in
+  PKCS#10 containers are again encoded as PrintableString if possible.
+
+- The legacy stroke plugin is no longer enabled by default.
+
+- The openssl plugin is now enabled by default, while the following crypto
+  plugins are no longer enabled by default: aes, curve25519, des, fips-prf, gmp,
+  hmac, md5, pkcs12, rc2, sha1, sha2.
+
+- The following deprecated plugins have been removed: bliss, newhope, ntru.
+
+- charon.make_before_break is now enabled by default.
+
+
 strongswan-5.9.14
 -----------------

 - Support for the IKEv2 OCSP extensions (RFC 4806) has been added, which allows
   peers to request and send OCSP responses directly in IKEv2.

 - Validation of X.509 name constraints in the constraints plugin has been
   refactored to align with RFC 5280.

 - The dhcp plugin has been ported to FreeBSD/macOS.

 - The openssl plugin is now compatible with AWS-LC.

 - Overflows of unique identifiers (e.g. Netlink sequence numbers or reqids) are
   now handled gracefully.

 - Updated the pkcs11.h header based on the latest OpenSC version in order to
-  include new algorithm and struct definitions for the pkcs11 plugin .
+  include new algorithm and struct definitions for the pkcs11 plugin.
   Added support for PSS padding in smartcard-based RSA signatures using either
   on-chip or external data hashing.

 - Added keyid and certid handles in the pki --ocsp command so that keys and/or
-  certificates can stored on a smartcard or in a TPM 2.0 device.
+  certificates can be stored on a smartcard or in a TPM 2.0 device.

 - Fail SA installation on Linux if replay protection is disabled while ESN is
   enabled, which the kernel currently doesn't support.


 strongswan-5.9.13
@@ -346,7 +476,7 @@ strongswan-5.9.4
   salt lengths.
   This vulnerability has been registered as CVE-2021-41990.

-- Fixed a denial-of-service vulnerabililty in the in-memory certificate cache
+- Fixed a denial-of-service vulnerability in the in-memory certificate cache
   if certificates are replaced and a very large random value caused an integer
   overflow.
   This vulnerability has been registered as CVE-2021-41991.
@@ -1758,7 +1888,7 @@ strongswan-5.0.3
   PT-TLS (RFC 6876), a Posture Transport Protocol over TLS.

 - The charon systime-fix plugin can disable certificate lifetime checks on
-  embedded systems if the system time is obviously out of sync after bootup.
+  embedded systems if the system time is obviously out of sync after boot-up.
   Certificates lifetimes get checked once the system time gets sane, closing
   or reauthenticating connections using expired certificates.
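
To make the EAP-Identity based config switching from the 6.0.2 notes concrete, here is a minimal swanctl.conf sketch. It is only an illustration: the connection name `rw`, the certificate file and the EAP method are hypothetical; `remote.eap_id` is the option the release notes refer to.

    connections {
        rw {
            local {
                auth = pubkey
                certs = serverCert.pem
            }
            remote {
                # with 6.0.2, setting eap_id always initiates an EAP-Identity
                # exchange, and the received identity can select the config
                auth = eap-mschapv2
                eap_id = %any
            }
            children {
                net {
                    local_ts = 10.0.0.0/16
                }
            }
        }
    }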

@@ -566,7 +566,7 @@ to generate a traditional 3072 bit RSA key and store it in binary DER format.

 As an alternative a **TPM 2.0** *Trusted Platform Module* available on every
 recent Intel platform could be used as a virtual smartcard to securely store an
 RSA or ECDSA private key. For details, refer to the TPM 2.0
-[HOWTO](https://docs.strongswan.org/docs/5.9/tpm/tpm2.html).
+[HOWTO](https://docs.strongswan.org/docs/latest/tpm/tpm2.html).

 In a next step the command

@@ -16,6 +16,7 @@ options = \
	options/charon-systemd.opt \
	options/imcv.opt \
	options/imv_policy_manager.opt \
+	options/iptfs.opt \
	options/manager.opt \
	options/medsrv.opt \
	options/pki.opt \
@@ -31,7 +32,6 @@ plugins = \
	plugins/android_log.opt \
	plugins/attr.opt \
	plugins/attr-sql.opt \
-	plugins/bliss.opt \
	plugins/botan.opt \
	plugins/bypass-lan.opt \
	plugins/certexpire.opt \
@@ -77,7 +77,6 @@ plugins = \
	plugins/kernel-pfroute.opt \
	plugins/load-tester.opt \
	plugins/lookip.opt \
-	plugins/ntru.opt \
	plugins/openssl.opt \
	plugins/openxpki.opt \
	plugins/osx-attr.opt \

@@ -55,14 +55,6 @@ man pages) the following format can be used:

    full.section.name.include files/to/include
        Description of this include statement

-Dots in section/option names may be escaped with a backslash. For instance,
-with the following section description
-
-    charon.filelog./var/log/daemon\.log {}
-        Section to define logging into /var/log/daemon.log
-
-/var/log/daemon.log will be the name of the last section.
 """

 import sys
@@ -74,10 +66,10 @@ from functools import cmp_to_key, total_ordering
 @total_ordering
 class ConfigOption:
    """Representing a configuration option or described section in strongswan.conf"""
-    def __init__(self, path, default = None, section = False, commented = False, include = False):
-        self.path = path
-        self.name = path[-1]
-        self.fullname = '.'.join(path)
+    def __init__(self, fullname, default = None, section = False, commented = False, include = False):
+        self.path = fullname.split('.')
+        self.name = self.path[-1]
+        self.fullname = fullname
        self.default = default
        self.section = section
        self.commented = commented
@@ -141,8 +133,7 @@ class Parser:
        if m:
            if self.__current:
                self.__add_option(self.__current)
-            path = self.__split_name(m.group('name'))
-            self.__current = ConfigOption(path, m.group('default'),
+            self.__current = ConfigOption(m.group('name'), m.group('default'),
                                          commented = not m.group('assign'))
            return
        # section definition
@@ -150,8 +141,7 @@ class Parser:
        if m:
            if self.__current:
                self.__add_option(self.__current)
-            path = self.__split_name(m.group('name'))
-            self.__current = ConfigOption(path, section = True,
+            self.__current = ConfigOption(m.group('name'), section = True,
                                          commented = m.group('comment'))
            return
        # include definition
@@ -159,8 +149,7 @@ class Parser:
        if m:
            if self.__current:
                self.__add_option(self.__current)
-            path = self.__split_name(m.group('name'))
-            self.__current = ConfigOption(path, m.group('pattern'), include = True)
+            self.__current = ConfigOption(m.group('name'), m.group('pattern'), include = True)
            return
        # paragraph separator
        m = re.match(r'^\s*$', line)
@@ -171,10 +160,6 @@ class Parser:
        if m and self.__current:
            self.__current.add(m.group('text'))

-    def __split_name(self, name):
-        """Split the given full name in a list of section/option names"""
-        return [x.replace('\.', '.') for x in re.split(r'(?<!\\)\.', name)]
-
    def __add_option(self, option):
        """Adds the given option to the abstract storage"""
        option.desc = [desc for desc in option.desc if len(desc)]
@@ -194,12 +179,14 @@ class Parser:
        """Searches/Creates the option (section) based on a list of section names"""
        option = None
        options = self.options
-        for i, name in enumerate(path, 1):
+        fullname = ""
+        for name in path:
+            fullname += '.' + name if len(fullname) else name
            option = next((x for x in options if x.name == name and x.section), None)
            if not option:
                if not create:
                    break
-                option = ConfigOption(path[:i], section = True)
+                option = ConfigOption(fullname, section = True)
                options.append(option)
                if self.sort:
                    options.sort()
@@ -208,7 +195,7 @@ class Parser:

    def get_option(self, name):
        """Retrieves the option with the given name"""
-        return self.__get_option(self.__split_name(name))
+        return self.__get_option(name.split('.'))

 class TagReplacer:
    """Replaces formatting tags in text"""
@@ -254,6 +241,7 @@ class GroffTagReplacer(TagReplacer):
            if not punct:
                punct = ''
            text = re.sub(r'[\r\n\t]', ' ', m.group('text'))
+            text = re.sub(r'"', '""', text)
            return '{0}.R{1} "{2}" "{3}" "{4}"\n'.format(nl, format, brack, text, punct)
        return replacer

@@ -318,7 +306,8 @@ class ManFormatter:
    def __groffize(self, text):
        """Encode text as groff text"""
        text = self.__tags.replace(text)
-        text = re.sub(r'(?<!\\)-', r'\\-', text)
+        text = re.sub(r'\\(?!-)', '\\[rs]', text)
+        text = re.sub(r'(?<!\\)-', '\\-', text)
        # remove any leading whitespace
        return re.sub(r'^\s+', '', text, flags = re.MULTILINE)

@@ -26,8 +26,18 @@ charon.filelog.<name>.flush_line = no
	Enabling this option disables block buffering and enables line buffering.

 charon.filelog.<name>.ike_name = no
-	Prefix each log entry with the connection name and a unique numerical
-	identifier for each IKE_SA.
+	Add the connection name and a unique numerical identifier for the current
+	IKE_SA to each log entry if available.
+
+charon.filelog.<name>.json = no
+	If enabled, each log entry is written to the file as a JSON object.
+
+	Enables writing each log entry as a JSON object to the file. The properties
+	are "time" (if `time_format` is set), "thread", "group", "level" and "msg".
+	Newlines, double quotes and backslashes are escaped in the latter. If
+	`ike_name` is enabled, "ikesa-uniqueid" and "ikesa-name" are added to the
+	object if available. The `log_level` option does not apply if this is
+	enabled.

 charon.filelog.<name>.log_level = no
	Add the log level of each message after the subsystem (e.g. [IKE2]).
@@ -36,9 +46,10 @@ charon.filelog.<name>.time_format
	Prefix each log entry with a timestamp. The option accepts a format string
	as passed to **strftime**(3).

-charon.filelog.<name>.time_add_ms = no
-	Adds the milliseconds within the current second after the timestamp
-	(separated by a dot, so _time_format_ should end with %S or %T).
+charon.filelog.<name>.time_precision =
+	Add the milliseconds (_ms_) or microseconds (_us_) within the current second
+	after the timestamp (separated by a dot, so _time_format_ should end
+	with %S or %T). By default, nothing is added.

 charon.syslog {}
	Section to define syslog loggers, see LOGGER CONFIGURATION in
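
A minimal strongswan.conf sketch combining the filelog options documented above; the log file path is an arbitrary example, and `default` (the base log level) is a pre-existing filelog setting not shown in this hunk.

    charon {
        filelog {
            /var/log/charon.json {
                # write each entry as a JSON object, with microsecond timestamps
                json = yes
                time_format = %b %e %T
                time_precision = us
                ike_name = yes
                default = 1
            }
        }
    }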

@@ -1,6 +1,55 @@
 charon-nm {}
	Section with settings specific to the NetworkManager backend `charon-nm`.
+	Settings from the `charon` section are not inherited, but many can be used
+	here as well. Defaults for some settings are chosen very deliberately and
+	should only be changed in case of conflicts.
+
+charon-nm.ca_dir = <default>
+	Directory from which to load CA certificates if no certificate is
+	configured.
+
+charon-nm.install_virtual_ip_on = lo
+	Interface on which virtual IP addresses are installed. Note that NM
+	also installs the virtual IPs on the XFRM interface.

 charon-nm.mtu = 1400
	MTU for XFRM interfaces created by the NM plugin.
+
+charon-nm.port = 0
+	Source port when sending packets to port 500. Defaults to an ephemeral
+	port. May be set to 500 if firewall rules require a static port.
+
+charon-nm.port_nat_t = 0
+	Source port when sending packets to port 4500 or a custom server port.
+	Defaults to an ephemeral port. May be set to e.g. 4500 if firewall rules
+	require a static port.
+
+charon-nm.retransmit_base = 1.4
+	Base to use for calculating exponential back off, see IKEv2 RETRANSMISSION
+	in **strongswan.conf**(5). Default retransmission settings for charon-nm are
+	deliberately lower to fail and possibly reestablish SAs more quickly.
+
+charon-nm.retransmit_timeout = 2.0
+	Timeout in seconds before sending first retransmit.
+
+charon-nm.retransmit_tries = 3
+	Number of times to retransmit a packet before giving up.
+
+charon-nm.routing_table = 210
+	Table where routes via XFRM interface are installed. Should be different
+	than the table used for the regular IKE daemon due to the mark.
+
+charon-nm.routing_table_prio = 210
+	Priority of the routing table. Higher than the default priority used for the
+	regular IKE daemon.
+
+charon-nm.plugins.kernel-netlink.fwmark = !210
+	Make packets with this mark ignore the routing table. Must be the same mark
+	set in charon-nm.plugins.socket-default.fwmark.
+
+charon-nm.plugins.socket-default.fwmark = 210
+	Mark applied to IKE and ESP packets to ignore the routing table and avoid
+	routing loops when using XFRM interfaces.
+
+charon-nm.syslog.daemon.default = 1
+	Default to logging via syslog's daemon facility on level 1.
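
The section above stresses that the charon-nm defaults should only be changed on conflicts. A hedged sketch of such an override in strongswan.conf, assuming table and mark 210 are already in use on the system (220 is an arbitrary replacement value):

    charon-nm {
        # move NetworkManager-managed tunnels to an unused table/mark
        routing_table = 220
        routing_table_prio = 220
        plugins {
            kernel-netlink {
                fwmark = !220
            }
            socket-default {
                fwmark = 220
            }
        }
    }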

@@ -154,8 +154,16 @@ charon.fragment_size = 1280
	Maximum size (complete IP datagram size in bytes) of a sent IKE fragment
	when using proprietary IKEv1 or standardized IKEv2 fragmentation, defaults
	to 1280 (use 0 for address family specific default values, which uses a
-	lower value for IPv4). If specified this limit is used for both IPv4 and
-	IPv6.
+	lower value for IPv4). Unless overridden, this limit is used for both IPv4
+	and IPv6 if specified.
+
+charon.fragment_size_v4 = charon.fragment_size
+	Maximum size (complete IPv4 datagram size in bytes) of a sent IKE fragment
+	when using proprietary IKEv1 or standardized IKEv2 fragmentation.
+
+charon.fragment_size_v6 = charon.fragment_size
+	Maximum size (complete IPv6 datagram size in bytes) of a sent IKE fragment
+	when using proprietary IKEv1 or standardized IKEv2 fragmentation.

 charon.group
	Name of the group the daemon changes to after startup.
@@ -283,7 +291,7 @@ charon.max_ikev1_exchanges = 3
 charon.max_packet = 10000
	Maximum packet size accepted by charon.

-charon.make_before_break = no
+charon.make_before_break = yes
	Initiate IKEv2 reauthentication with a make-before-break scheme.

	Initiate IKEv2 reauthentication with a make-before-break instead of a
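
A short strongswan.conf sketch using the new per-family options documented above; the values are illustrative only.

    charon {
        # conservative IPv4 fragments, larger IPv6 fragments
        fragment_size_v4 = 1280
        fragment_size_v6 = 1400
    }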

conf/options/iptfs.opt (new file, 38 lines)

@@ -0,0 +1,38 @@
+charon.iptfs {}
+	Global settings for IP-TFS (RFC 9347). The Linux kernel supports this mode
+	since 6.14. However, it currently only supports aggregation/fragmentation of
+	tunneled IP packets in ESP/AGGFRAG packets. It doesn't yet support other
+	IP-TFS features like sending packets at a constant rate or congestion control.
+
+charon.iptfs.drop_time = 1000000
+	Time in microseconds to wait for out-of-order packets when processing
+	inbound traffic.
+
+charon.iptfs.reorder_window = 3
+	Number of packets that may arrive out of order when processing inbound
+	traffic.
+
+charon.iptfs.init_delay = 0
+	Time in microseconds to wait for subsequent packets to aggregate together
+	when sending outbound traffic. Only relevant if no packets are already
+	queued to be sent.
+
+charon.iptfs.max_queue_size = 1048576
+	Maximum number of bytes allowed to be queued for sending on the tunnel
+	(default 1 MiB). If the queue is full, packets are dropped.
+
+charon.iptfs.packet_size = 0
+	Maximum outer packet size (layer 3) when sending packets. The default of 0
+	will use the PMTU as packet size. Note that the kernel currently doesn't
+	pad smaller packets.
+
+charon.iptfs.accept_fragments = yes
+	Whether fragments of inner packets across multiple AGGFRAG payloads are
+	accepted. This is an IKEv2 option, so if the peer doesn't adhere to this
+	request and still sends such fragments, they will be processed by the
+	kernel.
+
+charon.iptfs.dont_frag = no
+	Force disabling fragmenting inner packets across multiple AGGFRAG payloads
+	when sending outbound traffic (fragmentation is automatically disabled if
+	the peer indicates that it doesn't support handling such packets).
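
A hedged strongswan.conf sketch for the new charon.iptfs section; all values that differ from the documented defaults are purely illustrative.

    charon {
        iptfs {
            # tolerate more reordering on a lossy path
            reorder_window = 5
            drop_time = 2000000
            # wait up to 200 us to aggregate small packets into one AGGFRAG payload
            init_delay = 200
            # 0 derives the outer packet size from the PMTU (default)
            packet_size = 0
        }
    }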

@@ -1,2 +0,0 @@
-charon.plugins.bliss.use_bliss_b = yes
-	Use the enhanced BLISS-B key generation and signature algorithm.

@@ -36,3 +36,13 @@ charon.plugins.dhcp.interface
	Interface name the plugin uses for address allocation. The default is to
	bind to any (0.0.0.0) and let the system decide which way to route the
	packets to the DHCP server.
+
+charon.plugins.dhcp.interface_receive = charon.plugins.dhcp.interface
+	Interface name the plugin uses to bind its receive socket.
+
+	Interface name the plugin uses to bind its receive socket. The default is
+	to use the same interface as the send socket. Set it to the empty string
+	to avoid binding the receive socket to any interface while the send socket
+	is bound to one. If the server runs on the same host and the send socket is
+	bound to an interface, it might be necessary to set this to `lo` or the
+	empty string.
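
A sketch of the scenario described above, with the DHCP server running on the same host; the server address and interface names are placeholders.

    charon {
        plugins {
            dhcp {
                server = 192.168.1.1
                interface = eth0
                # receive replies locally instead of via the send interface
                interface_receive = lo
            }
        }
    }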

@@ -84,6 +84,9 @@ charon.plugins.eap-radius.secret =
 charon.plugins.eap-radius.server =
	IP/Hostname of RADIUS server.

+charon.plugins.eap-radius.source =
+	Optional specific source IP to use.
+
 charon.plugins.eap-radius.retransmit_base = 1.4
	Base to use for calculating exponential back off.

@@ -96,12 +99,12 @@ charon.plugins.eap-radius.retransmit_tries = 4
 charon.plugins.eap-radius.servers {}
	Section to specify multiple RADIUS servers.

-	Section to specify multiple RADIUS servers. The **nas_identifier**,
-	**secret**, **sockets** and **port** (or **auth_port**) options can be
-	specified for each server. A server's IP/Hostname can be configured using
-	the **address** option. The **acct_port** [1813] option can be used to
-	specify the port used for RADIUS accounting. For each RADIUS server a
-	priority can be specified using the **preference** [0] option. The
+	Section to specify multiple RADIUS servers. The **source**,
+	**nas_identifier**, **secret**, **sockets** and **port** (or **auth_port**)
+	options can be specified for each server. A server's IP/Hostname can be
+	configured using the **address** option. The **acct_port** [1813] option can
+	be used to specify the port used for RADIUS accounting. For each RADIUS
+	server a priority can be specified using the **preference** [0] option. The
	retransmission time for each server can set set using **retransmit_base**,
	**retransmit_timeout** and **retransmit_tries**.
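
A hedged sketch of an eap-radius servers section using the options listed above; the addresses, the secret and the per-server section names are placeholders.

    charon {
        plugins {
            eap-radius {
                servers {
                    primary {
                        address = 10.0.0.10
                        secret = <radius-secret>
                        # new in 6.0.1: send RADIUS messages from this source IP
                        source = 10.0.0.1
                        preference = 10
                    }
                    backup {
                        address = 10.0.0.11
                        secret = <radius-secret>
                    }
                }
            }
        }
    }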

@@ -1,4 +0,0 @@
-charon.plugins.ntru.parameter_set = optimum
-	The following parameter sets are available: **x9_98_speed**,
-	**x9_98_bandwidth**, **x9_98_balance** and **optimum**, the last set not
-	being part of the X9.98 standard but having the best performance.
145
configure.ac
145
configure.ac
@ -20,7 +20,7 @@
|
||||
# initialize & set some vars
|
||||
# ============================
|
||||
|
||||
AC_INIT([strongSwan],[5.9.14dr1])
|
||||
AC_INIT([strongSwan],[6.0.2])
|
||||
AM_INIT_AUTOMAKE(m4_esyscmd([
|
||||
echo tar-ustar
|
||||
echo subdir-objects
|
||||
@ -33,21 +33,18 @@ AM_INIT_AUTOMAKE(m4_esyscmd([
|
||||
esac
|
||||
]))
|
||||
m4_ifdef([AM_SILENT_RULES], [AM_SILENT_RULES])
|
||||
AC_CONFIG_MACRO_DIR([m4/config])
|
||||
AC_CONFIG_MACRO_DIRS([m4/config m4/macros])
|
||||
AC_CONFIG_HEADERS([config.h])
|
||||
AC_DEFINE([CONFIG_H_INCLUDED], [], [defined if config.h included])
|
||||
AC_DISABLE_STATIC
|
||||
PKG_PROG_PKG_CONFIG
|
||||
|
||||
m4_include(m4/macros/split-package-version.m4)
|
||||
SPLIT_PACKAGE_VERSION
|
||||
|
||||
# =================================
|
||||
# check --enable-xxx & --with-xxx
|
||||
# =================================
|
||||
|
||||
m4_include(m4/macros/with.m4)
|
||||
|
||||
ARG_WITH_SUBST([random-device], [/dev/random], [set the device to read real random data from])
|
||||
ARG_WITH_SUBST([urandom-device], [/dev/urandom], [set the device to read pseudo random data from])
|
||||
ARG_WITH_SUBST([strongswan-conf], [${sysconfdir}/strongswan.conf], [set the strongswan.conf file location])
|
||||
@ -70,7 +67,7 @@ ARG_WITH_SET([mpz_powm_sec], [yes], [use the more side-channel resistant
|
||||
ARG_WITH_SET([dev-headers], [no], [install strongSwan development headers to directory.])
|
||||
ARG_WITH_SET([printf-hooks], [auto], [force the use of a specific printf hook implementation (auto, builtin, glibc, vstr).])
|
||||
ARG_WITH_SET([rubygemdir], ["gem environment gemdir"], [path to install ruby gems to])
|
||||
ARG_WITH_SET([pythoneggdir], ["main site-packages directory"], [path to install python eggs to to])
|
||||
ARG_WITH_SET([testable-ke], [yes], [make key exchange implementations testable by providing a set_seed() method])
|
||||
|
||||
if test -n "$PKG_CONFIG"; then
|
||||
systemdsystemunitdir_default=$($PKG_CONFIG --variable=systemdsystemunitdir systemd)
|
||||
@ -129,42 +126,38 @@ fi
|
||||
# convert script name to uppercase
|
||||
AC_SUBST(ipsec_script_upper, [`echo -n "$ipsec_script" | tr a-z A-Z`])
|
||||
|
||||
m4_include(m4/macros/enable-disable.m4)
|
||||
|
||||
# crypto plugins
|
||||
ARG_DISBL_SET([aes], [disable AES software implementation plugin.])
|
||||
ARG_ENABL_SET([aes], [enable AES software implementation plugin.])
|
||||
ARG_ENABL_SET([af-alg], [enable AF_ALG crypto interface to Linux Crypto API.])
|
||||
ARG_ENABL_SET([bliss], [enable BLISS software implementation plugin.])
|
||||
ARG_ENABL_SET([blowfish], [enable Blowfish software implementation plugin.])
|
||||
ARG_ENABL_SET([botan], [enables the Botan crypto plugin.])
|
||||
ARG_ENABL_SET([ccm], [enables the CCM AEAD wrapper crypto plugin.])
|
||||
ARG_ENABL_SET([chapoly], [enables the ChaCha20/Poly1305 AEAD plugin.])
|
||||
ARG_DISBL_SET([cmac], [disable CMAC crypto implementation plugin.])
|
||||
ARG_ENABL_SET([ctr], [enables the Counter Mode wrapper crypto plugin.])
|
||||
ARG_DISBL_SET([des], [disable DES/3DES software implementation plugin.])
|
||||
ARG_ENABL_SET([des], [enable DES/3DES software implementation plugin.])
|
||||
ARG_DISBL_SET([drbg], [disable the NIST Deterministic Random Bit Generator plugin.])
|
||||
ARG_DISBL_SET([fips-prf], [disable FIPS PRF software implementation plugin.])
|
||||
ARG_DISBL_SET([gcm], [disable the GCM AEAD wrapper crypto plugin.])
|
||||
ARG_ENABL_SET([fips-prf], [enable FIPS PRF software implementation plugin.])
|
||||
ARG_ENABL_SET([gcm], [enable the GCM AEAD wrapper crypto plugin.])
|
||||
ARG_ENABL_SET([gcrypt], [enables the libgcrypt plugin.])
|
||||
ARG_DISBL_SET([gmp], [disable GNU MP (libgmp) based crypto implementation plugin.])
|
||||
ARG_DISBL_SET([curve25519], [disable Curve25519 Diffie-Hellman plugin.])
|
||||
ARG_DISBL_SET([hmac], [disable HMAC crypto implementation plugin.])
|
||||
ARG_ENABL_SET([gmp], [enable GNU MP (libgmp) based crypto implementation plugin.])
|
||||
ARG_ENABL_SET([curve25519], [enable Curve25519 Diffie-Hellman plugin.])
|
||||
ARG_ENABL_SET([hmac], [enable HMAC crypto implementation plugin.])
|
||||
ARG_DISBL_SET([kdf], [disable KDF (prf+) implementation plugin.])
|
||||
ARG_ENABL_SET([md4], [enable MD4 software implementation plugin.])
|
||||
ARG_DISBL_SET([md5], [disable MD5 software implementation plugin.])
|
||||
ARG_ENABL_SET([md5], [enable MD5 software implementation plugin.])
|
||||
ARG_ENABL_SET([mgf1], [enable the MGF1 software implementation plugin.])
|
||||
ARG_ENABL_SET([newhope], [enable New Hope crypto plugin.])
|
||||
ARG_ENABL_SET([ml], [enable Module-Lattice-based crypto (ML-KEM) plugin.])
|
||||
ARG_DISBL_SET([nonce], [disable nonce generation plugin.])
|
||||
ARG_ENABL_SET([ntru], [enables the NTRU crypto plugin.])
|
||||
ARG_ENABL_SET([openssl], [enables the OpenSSL crypto plugin.])
|
||||
ARG_DISBL_SET([openssl], [disable the OpenSSL crypto plugin.])
|
||||
ARG_ENABL_SET([wolfssl], [enables the wolfSSL crypto plugin.])
|
||||
ARG_ENABL_SET([padlock], [enables VIA Padlock crypto plugin.])
|
||||
ARG_DISBL_SET([random], [disable RNG implementation on top of /dev/(u)random.])
|
||||
ARG_DISBL_SET([rc2], [disable RC2 software implementation plugin.])
|
||||
ARG_ENABL_SET([rc2], [enable RC2 software implementation plugin.])
|
||||
ARG_ENABL_SET([rdrand], [enable Intel RDRAND random generator plugin.])
|
||||
ARG_ENABL_SET([aesni], [enable Intel AES-NI crypto plugin.])
|
||||
ARG_DISBL_SET([sha1], [disable SHA1 software implementation plugin.])
|
||||
ARG_DISBL_SET([sha2], [disable SHA256/SHA384/SHA512 software implementation plugin.])
|
||||
ARG_ENABL_SET([sha1], [enable SHA1 software implementation plugin.])
|
||||
ARG_ENABL_SET([sha2], [enable SHA256/SHA384/SHA512 software implementation plugin.])
|
||||
ARG_ENABL_SET([sha3], [enable SHA3_224/SHA3_256/SHA3_384/SHA3_512 software implementation plugin.])
|
||||
ARG_DISBL_SET([xcbc], [disable xcbc crypto implementation plugin.])
|
||||
# encoding/decoding plugins
|
||||
@ -174,7 +167,7 @@ ARG_DISBL_SET([pgp], [disable PGP key decoding plugin.])
|
||||
ARG_DISBL_SET([pkcs1], [disable PKCS1 key decoding plugin.])
|
||||
ARG_DISBL_SET([pkcs7], [disable PKCS7 container support plugin.])
|
||||
ARG_DISBL_SET([pkcs8], [disable PKCS8 private key decoding plugin.])
|
||||
ARG_DISBL_SET([pkcs12], [disable PKCS12 container support plugin.])
|
||||
ARG_ENABL_SET([pkcs12], [enable PKCS12 container support plugin.])
|
||||
ARG_DISBL_SET([pubkey], [disable RAW public key support plugin.])
|
||||
ARG_DISBL_SET([sshkey], [disable SSH key decoding plugin.])
|
||||
ARG_DISBL_SET([x509], [disable X509 certificate implementation plugin.])
|
||||
@ -237,10 +230,9 @@ ARG_DISBL_SET([socket-default], [disable default socket implementation for charo
|
||||
ARG_ENABL_SET([socket-dynamic], [enable dynamic socket implementation for charon])
|
||||
ARG_ENABL_SET([socket-win], [enable Winsock2 based socket implementation for charon])
|
||||
# configuration/control plugins
|
||||
ARG_DISBL_SET([stroke], [disable charons stroke configuration backend.])
|
||||
ARG_ENABL_SET([stroke], [enable the stroke configuration backend.])
|
||||
ARG_ENABL_SET([smp], [enable SMP configuration and control interface. Requires libxml.])
|
||||
ARG_ENABL_SET([sql], [enable SQL database configuration backend.])
|
||||
ARG_ENABL_SET([uci], [enable OpenWRT UCI configuration plugin.])
|
||||
ARG_DISBL_SET([vici], [disable strongSwan IKE generic IPC interface plugin.])
|
||||
# attribute provider/consumer plugins
|
||||
ARG_ENABL_SET([android-dns], [enable Android specific DNS handler.])
|
||||
@ -320,8 +312,8 @@ ARG_ENABL_SET([mediation], [enable IKEv2 Mediation Extension.])
|
||||
ARG_ENABL_SET([unwind-backtraces],[use libunwind to create backtraces for memory leaks and segfaults.])
|
||||
ARG_ENABL_SET([ruby-gems], [enable build of provided ruby gems.])
|
||||
ARG_ENABL_SET([ruby-gems-install],[enable installation of provided ruby gems.])
|
||||
ARG_ENABL_SET([python-eggs], [enable build of provided python eggs.])
|
||||
ARG_ENABL_SET([python-eggs-install],[enable installation of provided python eggs.])
|
||||
ARG_ENABL_SET([python-wheels], [enable build of provided python wheels.])
|
||||
ARG_ENABL_SET([python-eggs], [legacy alias for --enable-python-wheels.])
|
||||
ARG_ENABL_SET([perl-cpan], [enable build of provided perl CPAN module.])
|
||||
ARG_ENABL_SET([perl-cpan-install],[enable installation of provided CPAN module.])
|
||||
ARG_ENABL_SET([selinux], [enable SELinux support for labeled IPsec.])
|
||||
@ -470,6 +462,10 @@ if test x$fips_prf = xtrue; then
|
||||
fi
|
||||
fi
|
||||
|
||||
if test x$pkcs12 = xtrue; then
|
||||
rc2=true;
|
||||
fi
|
||||
|
||||
if test x$swanctl = xtrue; then
|
||||
vici=true
|
||||
fi
|
||||
@ -495,8 +491,8 @@ if test x$ruby_gems_install = xtrue; then
|
||||
ruby_gems=true
|
||||
fi
|
||||
|
||||
if test x$python_eggs_install = xtrue; then
|
||||
python_eggs=true
|
||||
if test x$python_eggs = xtrue; then
|
||||
python_wheels=true
|
||||
fi
|
||||
|
||||
if test x$perl_cpan_install = xtrue; then
|
||||
@ -511,11 +507,11 @@ if test x$tpm = xtrue; then
|
||||
tss_tss2=true
|
||||
fi
|
||||
|
||||
if test x$gmp = xtrue -o x$ntru = xtrue -o x$bliss = xtrue; then
|
||||
if test x$gmp = xtrue; then
|
||||
mgf1=true
|
||||
fi
|
||||
|
||||
if test x$stroke = xtrue; then
|
||||
if test x$stroke = xtrue -o x$vici = xtrue; then
|
||||
counters=true
|
||||
fi
|
||||
|
||||
@ -524,15 +520,28 @@ if test x$cert_enroll = xtrue; then
|
||||
fi
|
||||
|
||||
if test x$kdf = xfalse; then
|
||||
openssl_hkdf=false
|
||||
if test x$openssl = xtrue; then
|
||||
AC_MSG_CHECKING(for OpenSSL >= 3.0 for HKDF)
|
||||
AC_COMPILE_IFELSE(
|
||||
[AC_LANG_PROGRAM(
|
||||
[[#include <openssl/opensslv.h>]],
|
||||
[[#if OPENSSL_VERSION_NUMBER < 0x30000000L && !defined(OPENSSL_IS_AWSLC)
|
||||
#error OpenSSL version unusable
|
||||
#endif]])],
|
||||
[AC_MSG_RESULT([yes]); openssl_hkdf=true],
|
||||
[AC_MSG_RESULT([no])]
|
||||
)
|
||||
fi
|
||||
if test x$aesni = xtrue -o x$cmac = xtrue -o x$xcbc = xtrue; then
|
||||
AC_MSG_WARN(m4_normalize([
|
||||
kdf plugin is required for possible use of PRF_AES128_XCBC/CMAC
|
||||
by one of these plugins: aesni, cmac, xcbc]))
|
||||
kdf=true
|
||||
elif test x$botan = xfalse -a x$openssl = xfalse -a x$wolfssl = xfalse; then
|
||||
elif test x$botan = xfalse -a x$openssl_hkdf = xfalse -a x$wolfssl = xfalse; then
|
||||
AC_MSG_WARN(m4_normalize([
|
||||
kdf plugin is required because none of the following plugins is
|
||||
enabled: botan, openssl, wolfssl]))
|
||||
enabled or usable: botan, openssl, wolfssl]))
|
||||
kdf=true
|
||||
fi
|
||||
fi
|
||||
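The block above relies only on a compile-time version probe, so it can be reproduced outside of configure. A minimal standalone sketch of the same check follows; the flags for locating the OpenSSL headers are left to the build environment.

/* probe.c: compiles only against OpenSSL >= 3.0 or AWS-LC, the versions for
 * which the configure logic above does not force-enable the kdf plugin
 * because HKDF is available natively. */
#include <openssl/opensslv.h>

#if OPENSSL_VERSION_NUMBER < 0x30000000L && !defined(OPENSSL_IS_AWSLC)
#error OpenSSL version unusable
#endif

int main(void)
{
	return 0;
}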
@ -595,6 +604,10 @@ AC_LINK_IFELSE(
|
||||
AC_SUBST(ATOMICLIB)
|
||||
|
||||
LIBS=$saved_LIBS
|
||||
|
||||
# Some platforms require explicit linking to use POSIX regular expressions
|
||||
AC_SEARCH_LIBS([regcomp], [regex], [AC_DEFINE([HAVE_REGEX], [], [have regcomp() etc.])])
|
||||
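The regcomp() search above is about the POSIX regular expression API. As an illustration, a minimal usage sketch of that API (pattern and input are arbitrary examples); it links without extra flags on glibc and against a separate regex library only where AC_SEARCH_LIBS detects the need.

/* Minimal use of the POSIX regex API the check above locates. */
#include <regex.h>
#include <stdio.h>

int main(void)
{
	regex_t re;

	if (regcomp(&re, "^ikev[12]$", REG_EXTENDED | REG_NOSUB) != 0)
	{
		return 1;
	}
	printf("%s\n", regexec(&re, "ikev2", 0, NULL, 0) == 0 ? "match" : "no match");
	regfree(&re);
	return 0;
}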
|
||||
# ------------------------------------------------------
|
||||
|
||||
AC_MSG_CHECKING(for dladdr)
|
||||
@ -713,6 +726,11 @@ AC_CHECK_HEADERS([netinet/ip6.h linux/fib_rules.h], [], [],
|
||||
#include <sys/types.h>
|
||||
#include <netinet/in.h>
|
||||
])
|
||||
AC_CHECK_HEADERS([linux/vm_sockets.h], [have_vm_sockets=true], [],
|
||||
[
|
||||
#include <sys/socket.h>
|
||||
])
|
||||
AM_CONDITIONAL(USE_VM_SOCKETS, [test "x$have_vm_sockets" = xtrue])
|
||||
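The header check above only detects linux/vm_sockets.h and exports the USE_VM_SOCKETS conditional; how the build uses it is not visible in this hunk. As a hedged illustration of what the header provides, a minimal AF_VSOCK socket sketch (the port number is purely hypothetical):

#include <sys/socket.h>
#include <linux/vm_sockets.h>
#include <stdio.h>
#include <unistd.h>

int main(void)
{
	struct sockaddr_vm addr = {
		.svm_family = AF_VSOCK,
		.svm_cid = VMADDR_CID_ANY,
		.svm_port = 4500,	/* hypothetical port, for illustration only */
	};
	int fd = socket(AF_VSOCK, SOCK_STREAM, 0);

	if (fd < 0 || bind(fd, (struct sockaddr*)&addr, sizeof(addr)) < 0)
	{
		perror("vsock");
		return 1;
	}
	close(fd);
	return 0;
}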
|
||||
AC_CHECK_MEMBERS([struct sockaddr.sa_len], [], [],
|
||||
[
|
||||
@ -1026,7 +1044,7 @@ if test x$unbound = xtrue; then
|
||||
fi
|
||||
|
||||
if test x$soup = xtrue; then
|
||||
PKG_CHECK_MODULES(soup, [libsoup-2.4])
|
||||
PKG_CHECK_MODULES(soup, [libsoup-3.0])
|
||||
AC_SUBST(soup_CFLAGS)
|
||||
AC_SUBST(soup_LIBS)
|
||||
fi
|
||||
@ -1220,11 +1238,6 @@ if test x$botan = xtrue; then
|
||||
LIBS=$saved_LIBS
|
||||
fi
|
||||
|
||||
if test x$uci = xtrue; then
|
||||
AC_CHECK_LIB([uci],[uci_alloc_context],[LIBS="$LIBS"],[AC_MSG_ERROR([UCI library libuci not found])],[])
|
||||
AC_CHECK_HEADER([uci.h],,[AC_MSG_ERROR([UCI header uci.h not found!])])
|
||||
fi
|
||||
|
||||
if test x$android_dns = xtrue; then
|
||||
AC_CHECK_LIB([cutils],[property_get],[LIBS="$LIBS"],[AC_MSG_ERROR([Android library libcutils not found])],[])
|
||||
AC_CHECK_HEADER([cutils/properties.h],,[AC_MSG_ERROR([Android header cutils/properties.h not found!])])
|
||||
@ -1332,6 +1345,10 @@ if test x$unwind_backtraces = xtrue; then
|
||||
AC_SUBST(UNWINDLIB)
|
||||
fi
|
||||
|
||||
if test "x$testable_ke" = xyes; then
|
||||
AC_DEFINE([TESTABLE_KE], [1], [Define to 1 if key exchange methods should be testable.])
|
||||
fi
|
||||
|
||||
AM_CONDITIONAL(USE_DEV_HEADERS, [test "x$dev_headers" != xno])
|
||||
if test x$dev_headers = xyes; then
|
||||
dev_headers="$includedir/strongswan"
|
||||
@ -1355,7 +1372,7 @@ if test x$coverage = xtrue; then
|
||||
AC_MSG_ERROR([genhtml not found])
|
||||
fi
|
||||
|
||||
COVERAGE_CFLAGS="-fprofile-arcs -ftest-coverage"
|
||||
COVERAGE_CFLAGS="-fprofile-arcs -ftest-coverage -fprofile-update=atomic"
|
||||
COVERAGE_LDFLAGS="-fprofile-arcs"
|
||||
AC_SUBST(COVERAGE_CFLAGS)
|
||||
AC_SUBST(COVERAGE_LDFLAGS)
|
||||
@ -1421,24 +1438,12 @@ if test x$ruby_gems = xtrue; then
|
||||
fi
|
||||
AM_CONDITIONAL(RUBY_GEMS_INSTALL, [test "x$ruby_gems_install" = xtrue])
|
||||
|
||||
if test x$python_eggs = xtrue; then
|
||||
if test x$python_wheels = xtrue; then
|
||||
PYTHON_PACKAGE_VERSION=`echo "$PACKAGE_VERSION" | $SED 's/dr/.dev/'`
|
||||
AC_SUBST([PYTHON_PACKAGE_VERSION])
|
||||
if test x$python_eggs_install = xtrue; then
|
||||
AC_PATH_PROG([EASY_INSTALL], [easy_install], [], [$PATH:/bin:/usr/bin:/usr/local/bin])
|
||||
if test x$EASY_INSTALL = x; then
|
||||
AC_MSG_ERROR(Python easy_install not found)
|
||||
fi
|
||||
fi
|
||||
if test "x$pythoneggdir" = "xmain site-packages directory"; then
|
||||
AC_SUBST(PYTHONEGGINSTALLDIR, "")
|
||||
else
|
||||
AC_SUBST(PYTHONEGGINSTALLDIR, "--install-dir $pythoneggdir")
|
||||
fi
|
||||
AC_PATH_PROG([TOX], [tox], [], [$PATH:/bin:/usr/bin:/usr/local/bin])
|
||||
AC_PATH_PROG([PY_TEST], [py.test], [], [$PATH:/bin:/usr/bin:/usr/local/bin])
|
||||
fi
|
||||
AM_CONDITIONAL(PYTHON_EGGS_INSTALL, [test "x$python_eggs_install" = xtrue])
|
||||
|
||||
AM_CONDITIONAL(PERL_CPAN_INSTALL, [test "x$perl_cpan_install" = xtrue])
|
||||
|
||||
@ -1510,8 +1515,6 @@ CFLAGS="$WARN_CFLAGS $CFLAGS"
|
||||
# collect plugin list for strongSwan components
|
||||
# ===============================================
|
||||
|
||||
m4_include(m4/macros/add-plugin.m4)
|
||||
|
||||
# plugin lists for all components
|
||||
charon_plugins=
|
||||
pool_plugins=
|
||||
@ -1553,7 +1556,7 @@ ADD_PLUGIN([random], [s charon pki scripts manager medsrv attest n
|
||||
ADD_PLUGIN([nonce], [s charon nm cmd aikgen])
|
||||
ADD_PLUGIN([x509], [s charon pki scripts attest nm cmd aikgen fuzz])
|
||||
ADD_PLUGIN([revocation], [s charon pki nm cmd])
|
||||
ADD_PLUGIN([constraints], [s charon nm cmd])
|
||||
ADD_PLUGIN([constraints], [s charon pki nm cmd])
|
||||
ADD_PLUGIN([acert], [s charon])
|
||||
ADD_PLUGIN([pubkey], [s charon pki cmd aikgen])
|
||||
ADD_PLUGIN([pkcs1], [s charon pki scripts manager medsrv attest nm cmd aikgen fuzz])
|
||||
@ -1585,10 +1588,8 @@ ADD_PLUGIN([kdf], [s charon pki scripts nm cmd])
|
||||
ADD_PLUGIN([ctr], [s charon scripts nm cmd])
|
||||
ADD_PLUGIN([ccm], [s charon scripts nm cmd])
|
||||
ADD_PLUGIN([gcm], [s charon scripts nm cmd])
|
||||
ADD_PLUGIN([ntru], [s charon scripts nm cmd])
|
||||
ADD_PLUGIN([ml], [s charon scripts nm cmd])
|
||||
ADD_PLUGIN([drbg], [s charon pki scripts nm cmd])
|
||||
ADD_PLUGIN([newhope], [s charon scripts nm cmd])
|
||||
ADD_PLUGIN([bliss], [s charon pki scripts nm cmd])
|
||||
ADD_PLUGIN([curl], [s charon pki scripts nm cmd])
|
||||
ADD_PLUGIN([files], [s charon pki scripts nm cmd])
|
||||
ADD_PLUGIN([winhttp], [s charon pki scripts])
|
||||
@ -1669,7 +1670,6 @@ ADD_PLUGIN([led], [c charon])
|
||||
ADD_PLUGIN([duplicheck], [c charon])
|
||||
ADD_PLUGIN([coupling], [c charon])
|
||||
ADD_PLUGIN([radattr], [c charon])
|
||||
ADD_PLUGIN([uci], [c charon])
|
||||
ADD_PLUGIN([addrblock], [c charon])
|
||||
ADD_PLUGIN([unity], [c charon])
|
||||
ADD_PLUGIN([counters], [c charon])
|
||||
@ -1755,10 +1755,8 @@ AM_CONDITIONAL(USE_CTR, test x$ctr = xtrue)
|
||||
AM_CONDITIONAL(USE_CCM, test x$ccm = xtrue)
|
||||
AM_CONDITIONAL(USE_GCM, test x$gcm = xtrue)
|
||||
AM_CONDITIONAL(USE_AF_ALG, test x$af_alg = xtrue)
|
||||
AM_CONDITIONAL(USE_NTRU, test x$ntru = xtrue)
|
||||
AM_CONDITIONAL(USE_NEWHOPE, test x$newhope = xtrue)
|
||||
AM_CONDITIONAL(USE_BLISS, test x$bliss = xtrue)
|
||||
AM_CONDITIONAL(USE_DRBG, test x$drbg = xtrue)
|
||||
AM_CONDITIONAL(USE_ML, test x$ml = xtrue)
|
||||
|
||||
# charon plugins
|
||||
# ----------------
|
||||
@ -1766,7 +1764,6 @@ AM_CONDITIONAL(USE_STROKE, test x$stroke = xtrue)
|
||||
AM_CONDITIONAL(USE_VICI, test x$vici = xtrue)
|
||||
AM_CONDITIONAL(USE_MEDSRV, test x$medsrv = xtrue)
|
||||
AM_CONDITIONAL(USE_MEDCLI, test x$medcli = xtrue)
|
||||
AM_CONDITIONAL(USE_UCI, test x$uci = xtrue)
|
||||
AM_CONDITIONAL(USE_OSX_ATTR, test x$osx_attr = xtrue)
|
||||
AM_CONDITIONAL(USE_P_CSCF, test x$p_cscf = xtrue)
|
||||
AM_CONDITIONAL(USE_ANDROID_DNS, test x$android_dns = xtrue)
|
||||
@ -1877,7 +1874,6 @@ AM_CONDITIONAL(USE_CONFTEST, test x$conftest = xtrue)
|
||||
AM_CONDITIONAL(USE_LIBSTRONGSWAN, test x$charon = xtrue -o x$pki = xtrue -o x$conftest = xtrue -o x$fast = xtrue -o x$imcv = xtrue -o x$nm = xtrue -o x$tkm = xtrue -o x$cmd = xtrue -o x$tls = xtrue -o x$tnc_tnccs = xtrue -o x$aikgen = xtrue -o x$svc = xtrue -o x$systemd = xtrue)
|
||||
AM_CONDITIONAL(USE_LIBCHARON, test x$charon = xtrue -o x$conftest = xtrue -o x$nm = xtrue -o x$tkm = xtrue -o x$cmd = xtrue -o x$svc = xtrue -o x$systemd = xtrue)
|
||||
AM_CONDITIONAL(USE_LIBIPSEC, test x$libipsec = xtrue)
|
||||
AM_CONDITIONAL(USE_LIBNTTFFT, test x$bliss = xtrue -o x$newhope = xtrue)
|
||||
AM_CONDITIONAL(USE_LIBTNCIF, test x$tnc_tnccs = xtrue -o x$imcv = xtrue)
|
||||
AM_CONDITIONAL(USE_LIBTNCCS, test x$tnc_tnccs = xtrue)
|
||||
AM_CONDITIONAL(USE_LIBPTTLS, test x$tnc_tnccs = xtrue)
|
||||
@ -1908,7 +1904,7 @@ AM_CONDITIONAL(USE_LEGACY_SYSTEMD, test -n "$systemdsystemunitdir" -a "x$systemd
|
||||
AM_CONDITIONAL(USE_CERT_ENROLL, test x$cert_enroll = xtrue)
|
||||
AM_CONDITIONAL(USE_CERT_ENROLL_TIMER, test x$cert_enroll_timer = xtrue)
|
||||
AM_CONDITIONAL(USE_RUBY_GEMS, test x$ruby_gems = xtrue)
|
||||
AM_CONDITIONAL(USE_PYTHON_EGGS, test x$python_eggs = xtrue)
|
||||
AM_CONDITIONAL(USE_PYTHON_WHEELS, test x$python_wheels = xtrue)
|
||||
AM_CONDITIONAL(USE_PERL_CPAN, test x$perl_cpan = xtrue)
|
||||
AM_CONDITIONAL(USE_TOX, test "x$TOX" != x)
|
||||
AM_CONDITIONAL(USE_PY_TEST, test "x$PY_TEST" != x -a "x$TOX" = x)
|
||||
@ -1953,14 +1949,16 @@ strongswan_options=
|
||||
|
||||
AM_COND_IF([USE_AIKGEN], [strongswan_options=${strongswan_options}" aikgen"])
|
||||
AM_COND_IF([USE_ATTR_SQL], [strongswan_options=${strongswan_options}" pool"])
|
||||
AM_COND_IF([USE_CHARON], [strongswan_options=${strongswan_options}" charon charon-logging"])
|
||||
AM_COND_IF([USE_CHARON], [strongswan_options=${strongswan_options}" charon charon-logging iptfs"])
|
||||
AM_COND_IF([USE_FILE_CONFIG], [strongswan_options=${strongswan_options}" starter"])
|
||||
AM_COND_IF([USE_IMV_ATTESTATION], [strongswan_options=${strongswan_options}" attest"])
|
||||
AM_COND_IF([USE_IMCV], [strongswan_options=${strongswan_options}" imcv"])
|
||||
AM_COND_IF([USE_IMCV], [strongswan_options=${strongswan_options}" imcv imv_policy_manager"])
|
||||
AM_COND_IF([USE_IMC_SWIMA], [strongswan_options=${strongswan_options}" sw-collector"])
|
||||
AM_COND_IF([USE_IMV_SWIMA], [strongswan_options=${strongswan_options}" sec-updater"])
|
||||
AM_COND_IF([USE_LIBTNCCS], [strongswan_options=${strongswan_options}" tnc"])
|
||||
AM_COND_IF([USE_MANAGER], [strongswan_options=${strongswan_options}" manager"])
|
||||
AM_COND_IF([USE_MEDSRV], [strongswan_options=${strongswan_options}" medsrv"])
|
||||
AM_COND_IF([USE_NM], [strongswan_options=${strongswan_options}" charon-nm"])
|
||||
AM_COND_IF([USE_PKI], [strongswan_options=${strongswan_options}" pki"])
|
||||
AM_COND_IF([USE_SWANCTL], [strongswan_options=${strongswan_options}" swanctl"])
|
||||
AM_COND_IF([USE_SYSTEMD], [strongswan_options=${strongswan_options}" charon-systemd"])
|
||||
@ -1982,8 +1980,6 @@ AC_CONFIG_FILES([
|
||||
src/Makefile
|
||||
src/include/Makefile
|
||||
src/libstrongswan/Makefile
|
||||
src/libstrongswan/math/libnttfft/Makefile
|
||||
src/libstrongswan/math/libnttfft/tests/Makefile
|
||||
src/libstrongswan/plugins/aes/Makefile
|
||||
src/libstrongswan/plugins/cmac/Makefile
|
||||
src/libstrongswan/plugins/des/Makefile
|
||||
@ -2041,11 +2037,7 @@ AC_CONFIG_FILES([
|
||||
src/libstrongswan/plugins/gcm/Makefile
|
||||
src/libstrongswan/plugins/af_alg/Makefile
|
||||
src/libstrongswan/plugins/drbg/Makefile
|
||||
src/libstrongswan/plugins/ntru/Makefile
|
||||
src/libstrongswan/plugins/bliss/Makefile
|
||||
src/libstrongswan/plugins/bliss/tests/Makefile
|
||||
src/libstrongswan/plugins/newhope/Makefile
|
||||
src/libstrongswan/plugins/newhope/tests/Makefile
|
||||
src/libstrongswan/plugins/ml/Makefile
|
||||
src/libstrongswan/plugins/test_vectors/Makefile
|
||||
src/libstrongswan/tests/Makefile
|
||||
src/libipsec/Makefile
|
||||
@ -2126,7 +2118,6 @@ AC_CONFIG_FILES([
|
||||
src/libcharon/plugins/medcli/Makefile
|
||||
src/libcharon/plugins/addrblock/Makefile
|
||||
src/libcharon/plugins/unity/Makefile
|
||||
src/libcharon/plugins/uci/Makefile
|
||||
src/libcharon/plugins/ha/Makefile
|
||||
src/libcharon/plugins/kernel_netlink/Makefile
|
||||
src/libcharon/plugins/kernel_pfkey/Makefile
|
||||
|
@ -11,7 +11,7 @@ AM_CPPFLAGS = @CPPFLAGS@ \
|
||||
|
||||
fuzz_ldflags = ${libfuzzer} \
|
||||
$(top_builddir)/src/libstrongswan/.libs/libstrongswan.a \
|
||||
-Wl,-Bstatic -lgmp -Wl,-Bdynamic \
|
||||
-Wl,-Bstatic -lcrypto -Wl,-Bdynamic \
|
||||
@FUZZING_LDFLAGS@
|
||||
|
||||
pa_tnc_ldflags = \
|
||||
|
@ -2,10 +2,12 @@
|
||||
SUBDIRS =
|
||||
|
||||
if USE_LEGACY_SYSTEMD
|
||||
if USE_FILE_CONFIG
|
||||
if USE_CHARON
|
||||
SUBDIRS += systemd-starter
|
||||
endif
|
||||
endif
|
||||
endif
|
||||
|
||||
if USE_SYSTEMD
|
||||
if USE_SWANCTL
|
||||
|
@ -1,6 +1,7 @@
|
||||
[Unit]
|
||||
Description=strongSwan IPsec IKEv1/IKEv2 daemon using ipsec.conf
|
||||
After=syslog.target network-online.target
|
||||
Wants=syslog.target network-online.target
|
||||
|
||||
[Service]
|
||||
ExecStart=@SBINDIR@/@IPSEC_SCRIPT@ start --nofork
|
||||
|
@ -1,6 +1,7 @@
|
||||
[Unit]
|
||||
Description=strongSwan IPsec IKEv1/IKEv2 daemon using swanctl
|
||||
After=network-online.target
|
||||
Wants=network-online.target
|
||||
|
||||
[Service]
|
||||
Type=notify
|
||||
|
532
m4/macros/host-cpu-c-abi.m4
Normal file
@ -0,0 +1,532 @@
|
||||
# host-cpu-c-abi.m4
|
||||
# serial 20
|
||||
dnl Copyright (C) 2002-2025 Free Software Foundation, Inc.
|
||||
dnl This file is free software; the Free Software Foundation
|
||||
dnl gives unlimited permission to copy and/or distribute it,
|
||||
dnl with or without modifications, as long as this notice is preserved.
|
||||
dnl This file is offered as-is, without any warranty.
|
||||
|
||||
dnl From Bruno Haible and Sam Steingold.
|
||||
|
||||
dnl Sets the HOST_CPU variable to the canonical name of the CPU.
|
||||
dnl Sets the HOST_CPU_C_ABI variable to the canonical name of the CPU with its
|
||||
dnl C language ABI (application binary interface).
|
||||
dnl Also defines __${HOST_CPU}__ and __${HOST_CPU_C_ABI}__ as C macros in
|
||||
dnl config.h.
|
||||
dnl
|
||||
dnl This canonical name can be used to select a particular assembly language
|
||||
dnl source file that will interoperate with C code on the given host.
|
||||
dnl
|
||||
dnl For example:
|
||||
dnl * 'i386' and 'sparc' are different canonical names, because code for i386
|
||||
dnl will not run on SPARC CPUs and vice versa. They have different
|
||||
dnl instruction sets.
|
||||
dnl * 'sparc' and 'sparc64' are different canonical names, because code for
|
||||
dnl 'sparc' and code for 'sparc64' cannot be linked together: 'sparc' code
|
||||
dnl contains 32-bit instructions, whereas 'sparc64' code contains 64-bit
|
||||
dnl instructions. A process on a SPARC CPU can be in 32-bit mode or in 64-bit
|
||||
dnl mode, but not both.
|
||||
dnl * 'mips' and 'mipsn32' are different canonical names, because they use
|
||||
dnl different argument passing and return conventions for C functions, and
|
||||
dnl although the instruction set of 'mips' is a large subset of the
|
||||
dnl instruction set of 'mipsn32'.
|
||||
dnl * 'mipsn32' and 'mips64' are different canonical names, because they use
|
||||
dnl different sizes for the C types like 'int' and 'void *', and although
|
||||
dnl the instruction sets of 'mipsn32' and 'mips64' are the same.
|
||||
dnl * The same canonical name is used for different endiannesses. You can
|
||||
dnl determine the endianness through preprocessor symbols:
|
||||
dnl - 'arm': test __ARMEL__.
|
||||
dnl - 'mips', 'mipsn32', 'mips64': test _MIPSEB vs. _MIPSEL.
|
||||
dnl - 'powerpc64': test __BIG_ENDIAN__ vs. __LITTLE_ENDIAN__.
|
||||
dnl * The same name 'i386' is used for CPUs of type i386, i486, i586
|
||||
dnl (Pentium), AMD K7, Pentium II, Pentium IV, etc., because
|
||||
dnl - Instructions that do not exist on all of these CPUs (cmpxchg,
|
||||
dnl MMX, SSE, SSE2, 3DNow! etc.) are not frequently used. If your
|
||||
dnl assembly language source files use such instructions, you will
|
||||
dnl need to make the distinction.
|
||||
dnl - Speed of execution of the common instruction set is reasonable across
|
||||
dnl the entire family of CPUs. If you have assembly language source files
|
||||
dnl that are optimized for particular CPU types (like GNU gmp has), you
|
||||
dnl will need to make the distinction.
|
||||
dnl See <https://en.wikipedia.org/wiki/X86_instruction_listings>.
|
||||
AC_DEFUN([gl_HOST_CPU_C_ABI],
|
||||
[
|
||||
AC_REQUIRE([AC_CANONICAL_HOST])
|
||||
AC_REQUIRE([gl_C_ASM])
|
||||
AC_CACHE_CHECK([host CPU and C ABI], [gl_cv_host_cpu_c_abi],
|
||||
[case "$host_cpu" in
|
||||
|
||||
changequote(,)dnl
|
||||
i[34567]86 )
|
||||
changequote([,])dnl
|
||||
gl_cv_host_cpu_c_abi=i386
|
||||
;;
|
||||
|
||||
x86_64 )
|
||||
# On x86_64 systems, the C compiler may be generating code in one of
|
||||
# these ABIs:
|
||||
# - 64-bit instruction set, 64-bit pointers, 64-bit 'long': x86_64.
|
||||
# - 64-bit instruction set, 64-bit pointers, 32-bit 'long': x86_64
|
||||
# with native Windows (mingw, MSVC).
|
||||
# - 64-bit instruction set, 32-bit pointers, 32-bit 'long': x86_64-x32.
|
||||
# - 32-bit instruction set, 32-bit pointers, 32-bit 'long': i386.
|
||||
AC_COMPILE_IFELSE(
|
||||
[AC_LANG_SOURCE(
|
||||
[[#if (defined __x86_64__ || defined __amd64__ \
|
||||
|| defined _M_X64 || defined _M_AMD64)
|
||||
int ok;
|
||||
#else
|
||||
error fail
|
||||
#endif
|
||||
]])],
|
||||
[AC_COMPILE_IFELSE(
|
||||
[AC_LANG_SOURCE(
|
||||
[[#if defined __ILP32__ || defined _ILP32
|
||||
int ok;
|
||||
#else
|
||||
error fail
|
||||
#endif
|
||||
]])],
|
||||
[gl_cv_host_cpu_c_abi=x86_64-x32],
|
||||
[gl_cv_host_cpu_c_abi=x86_64])],
|
||||
[gl_cv_host_cpu_c_abi=i386])
|
||||
;;
|
||||
|
||||
changequote(,)dnl
|
||||
alphaev[4-8] | alphaev56 | alphapca5[67] | alphaev6[78] )
|
||||
changequote([,])dnl
|
||||
gl_cv_host_cpu_c_abi=alpha
|
||||
;;
|
||||
|
||||
arm* | aarch64 )
|
||||
# Assume arm with EABI.
|
||||
# On arm64 systems, the C compiler may be generating code in one of
|
||||
# these ABIs:
|
||||
# - aarch64 instruction set, 64-bit pointers, 64-bit 'long': arm64.
|
||||
# - aarch64 instruction set, 32-bit pointers, 32-bit 'long': arm64-ilp32.
|
||||
# - 32-bit instruction set, 32-bit pointers, 32-bit 'long': arm or armhf.
|
||||
AC_COMPILE_IFELSE(
|
||||
[AC_LANG_SOURCE(
|
||||
[[#ifdef __aarch64__
|
||||
int ok;
|
||||
#else
|
||||
error fail
|
||||
#endif
|
||||
]])],
|
||||
[AC_COMPILE_IFELSE(
|
||||
[AC_LANG_SOURCE(
|
||||
[[#if defined __ILP32__ || defined _ILP32
|
||||
int ok;
|
||||
#else
|
||||
error fail
|
||||
#endif
|
||||
]])],
|
||||
[gl_cv_host_cpu_c_abi=arm64-ilp32],
|
||||
[gl_cv_host_cpu_c_abi=arm64])],
|
||||
[# Don't distinguish little-endian and big-endian arm, since they
|
||||
# don't require different machine code for simple operations and
|
||||
# since the user can distinguish them through the preprocessor
|
||||
# defines __ARMEL__ vs. __ARMEB__.
|
||||
# But distinguish arm which passes floating-point arguments and
|
||||
# return values in integer registers (r0, r1, ...) - this is
|
||||
# gcc -mfloat-abi=soft or gcc -mfloat-abi=softfp - from arm which
|
||||
# passes them in float registers (s0, s1, ...) and double registers
|
||||
# (d0, d1, ...) - this is gcc -mfloat-abi=hard. GCC 4.6 or newer
|
||||
# sets the preprocessor defines __ARM_PCS (for the first case) and
|
||||
# __ARM_PCS_VFP (for the second case), but older GCC does not.
|
||||
echo 'double ddd; void func (double dd) { ddd = dd; }' > conftest.c
|
||||
# Look for a reference to the register d0 in the .s file.
|
||||
AC_TRY_COMMAND(${CC-cc} $CFLAGS $CPPFLAGS $gl_c_asm_opt conftest.c) >/dev/null 2>&1
|
||||
if LC_ALL=C grep 'd0,' conftest.$gl_asmext >/dev/null; then
|
||||
gl_cv_host_cpu_c_abi=armhf
|
||||
else
|
||||
gl_cv_host_cpu_c_abi=arm
|
||||
fi
|
||||
rm -fr conftest*
|
||||
])
|
||||
;;
|
||||
|
||||
hppa1.0 | hppa1.1 | hppa2.0* | hppa64 )
|
||||
# On hppa, the C compiler may be generating 32-bit code or 64-bit
|
||||
# code. In the latter case, it defines _LP64 and __LP64__.
|
||||
AC_COMPILE_IFELSE(
|
||||
[AC_LANG_SOURCE(
|
||||
[[#ifdef __LP64__
|
||||
int ok;
|
||||
#else
|
||||
error fail
|
||||
#endif
|
||||
]])],
|
||||
[gl_cv_host_cpu_c_abi=hppa64],
|
||||
[gl_cv_host_cpu_c_abi=hppa])
|
||||
;;
|
||||
|
||||
ia64* )
|
||||
# On ia64 on HP-UX, the C compiler may be generating 64-bit code or
|
||||
# 32-bit code. In the latter case, it defines _ILP32.
|
||||
AC_COMPILE_IFELSE(
|
||||
[AC_LANG_SOURCE(
|
||||
[[#ifdef _ILP32
|
||||
int ok;
|
||||
#else
|
||||
error fail
|
||||
#endif
|
||||
]])],
|
||||
[gl_cv_host_cpu_c_abi=ia64-ilp32],
|
||||
[gl_cv_host_cpu_c_abi=ia64])
|
||||
;;
|
||||
|
||||
mips* )
|
||||
# We should also check for (_MIPS_SZPTR == 64), but gcc keeps this
|
||||
# at 32.
|
||||
AC_COMPILE_IFELSE(
|
||||
[AC_LANG_SOURCE(
|
||||
[[#if defined _MIPS_SZLONG && (_MIPS_SZLONG == 64)
|
||||
int ok;
|
||||
#else
|
||||
error fail
|
||||
#endif
|
||||
]])],
|
||||
[gl_cv_host_cpu_c_abi=mips64],
|
||||
[# In the n32 ABI, _ABIN32 is defined, _ABIO32 is not defined (but
|
||||
# may later get defined by <sgidefs.h>), and _MIPS_SIM == _ABIN32.
|
||||
# In the 32 ABI, _ABIO32 is defined, _ABIN32 is not defined (but
|
||||
# may later get defined by <sgidefs.h>), and _MIPS_SIM == _ABIO32.
|
||||
AC_COMPILE_IFELSE(
|
||||
[AC_LANG_SOURCE(
|
||||
[[#if (_MIPS_SIM == _ABIN32)
|
||||
int ok;
|
||||
#else
|
||||
error fail
|
||||
#endif
|
||||
]])],
|
||||
[gl_cv_host_cpu_c_abi=mipsn32],
|
||||
[gl_cv_host_cpu_c_abi=mips])])
|
||||
;;
|
||||
|
||||
powerpc* )
|
||||
# Different ABIs are in use on AIX vs. Mac OS X vs. Linux,*BSD.
|
||||
# No need to distinguish them here; the caller may distinguish
|
||||
# them based on the OS.
|
||||
# On powerpc64 systems, the C compiler may still be generating
|
||||
# 32-bit code. And on powerpc-ibm-aix systems, the C compiler may
|
||||
# be generating 64-bit code.
|
||||
AC_COMPILE_IFELSE(
|
||||
[AC_LANG_SOURCE(
|
||||
[[#if defined __powerpc64__ || defined __LP64__
|
||||
int ok;
|
||||
#else
|
||||
error fail
|
||||
#endif
|
||||
]])],
|
||||
[# On powerpc64, there are two ABIs on Linux: The AIX compatible
|
||||
# one and the ELFv2 one. The latter defines _CALL_ELF=2.
|
||||
AC_COMPILE_IFELSE(
|
||||
[AC_LANG_SOURCE(
|
||||
[[#if defined _CALL_ELF && _CALL_ELF == 2
|
||||
int ok;
|
||||
#else
|
||||
error fail
|
||||
#endif
|
||||
]])],
|
||||
[gl_cv_host_cpu_c_abi=powerpc64-elfv2],
|
||||
[gl_cv_host_cpu_c_abi=powerpc64])
|
||||
],
|
||||
[gl_cv_host_cpu_c_abi=powerpc])
|
||||
;;
|
||||
|
||||
rs6000 )
|
||||
gl_cv_host_cpu_c_abi=powerpc
|
||||
;;
|
||||
|
||||
riscv32 | riscv64 )
|
||||
# There are 2 architectures (with variants): rv32* and rv64*.
|
||||
AC_COMPILE_IFELSE(
|
||||
[AC_LANG_SOURCE(
|
||||
[[#if __riscv_xlen == 64
|
||||
int ok;
|
||||
#else
|
||||
error fail
|
||||
#endif
|
||||
]])],
|
||||
[cpu=riscv64],
|
||||
[cpu=riscv32])
|
||||
# There are 6 ABIs: ilp32, ilp32f, ilp32d, lp64, lp64f, lp64d.
|
||||
# Size of 'long' and 'void *':
|
||||
AC_COMPILE_IFELSE(
|
||||
[AC_LANG_SOURCE(
|
||||
[[#if defined __LP64__
|
||||
int ok;
|
||||
#else
|
||||
error fail
|
||||
#endif
|
||||
]])],
|
||||
[main_abi=lp64],
|
||||
[main_abi=ilp32])
|
||||
# Float ABIs:
|
||||
# __riscv_float_abi_double:
|
||||
# 'float' and 'double' are passed in floating-point registers.
|
||||
# __riscv_float_abi_single:
|
||||
# 'float' are passed in floating-point registers.
|
||||
# __riscv_float_abi_soft:
|
||||
# No values are passed in floating-point registers.
|
||||
AC_COMPILE_IFELSE(
|
||||
[AC_LANG_SOURCE(
|
||||
[[#if defined __riscv_float_abi_double
|
||||
int ok;
|
||||
#else
|
||||
error fail
|
||||
#endif
|
||||
]])],
|
||||
[float_abi=d],
|
||||
[AC_COMPILE_IFELSE(
|
||||
[AC_LANG_SOURCE(
|
||||
[[#if defined __riscv_float_abi_single
|
||||
int ok;
|
||||
#else
|
||||
error fail
|
||||
#endif
|
||||
]])],
|
||||
[float_abi=f],
|
||||
[float_abi=''])
|
||||
])
|
||||
gl_cv_host_cpu_c_abi="${cpu}-${main_abi}${float_abi}"
|
||||
;;
|
||||
|
||||
s390* )
|
||||
# On s390x, the C compiler may be generating 64-bit (= s390x) code
|
||||
# or 31-bit (= s390) code.
|
||||
AC_COMPILE_IFELSE(
|
||||
[AC_LANG_SOURCE(
|
||||
[[#if defined __LP64__ || defined __s390x__
|
||||
int ok;
|
||||
#else
|
||||
error fail
|
||||
#endif
|
||||
]])],
|
||||
[gl_cv_host_cpu_c_abi=s390x],
|
||||
[gl_cv_host_cpu_c_abi=s390])
|
||||
;;
|
||||
|
||||
sparc | sparc64 )
|
||||
# UltraSPARCs running Linux have `uname -m` = "sparc64", but the
|
||||
# C compiler still generates 32-bit code.
|
||||
AC_COMPILE_IFELSE(
|
||||
[AC_LANG_SOURCE(
|
||||
[[#if defined __sparcv9 || defined __arch64__
|
||||
int ok;
|
||||
#else
|
||||
error fail
|
||||
#endif
|
||||
]])],
|
||||
[gl_cv_host_cpu_c_abi=sparc64],
|
||||
[gl_cv_host_cpu_c_abi=sparc])
|
||||
;;
|
||||
|
||||
*)
|
||||
gl_cv_host_cpu_c_abi="$host_cpu"
|
||||
;;
|
||||
esac
|
||||
])
|
||||
|
||||
dnl In most cases, $HOST_CPU and $HOST_CPU_C_ABI are the same.
|
||||
HOST_CPU=`echo "$gl_cv_host_cpu_c_abi" | sed -e 's/-.*//'`
|
||||
HOST_CPU_C_ABI="$gl_cv_host_cpu_c_abi"
|
||||
AC_SUBST([HOST_CPU])
|
||||
AC_SUBST([HOST_CPU_C_ABI])
|
||||
|
||||
# This was
|
||||
# AC_DEFINE_UNQUOTED([__${HOST_CPU}__])
|
||||
# AC_DEFINE_UNQUOTED([__${HOST_CPU_C_ABI}__])
|
||||
# earlier, but KAI C++ 3.2d doesn't like this.
|
||||
sed -e 's/-/_/g' >> confdefs.h <<EOF
|
||||
#ifndef __${HOST_CPU}__
|
||||
#define __${HOST_CPU}__ 1
|
||||
#endif
|
||||
#ifndef __${HOST_CPU_C_ABI}__
|
||||
#define __${HOST_CPU_C_ABI}__ 1
|
||||
#endif
|
||||
EOF
|
||||
AH_TOP([/* CPU and C ABI indicator */
|
||||
#ifndef __i386__
|
||||
#undef __i386__
|
||||
#endif
|
||||
#ifndef __x86_64_x32__
|
||||
#undef __x86_64_x32__
|
||||
#endif
|
||||
#ifndef __x86_64__
|
||||
#undef __x86_64__
|
||||
#endif
|
||||
#ifndef __alpha__
|
||||
#undef __alpha__
|
||||
#endif
|
||||
#ifndef __arm__
|
||||
#undef __arm__
|
||||
#endif
|
||||
#ifndef __armhf__
|
||||
#undef __armhf__
|
||||
#endif
|
||||
#ifndef __arm64_ilp32__
|
||||
#undef __arm64_ilp32__
|
||||
#endif
|
||||
#ifndef __arm64__
|
||||
#undef __arm64__
|
||||
#endif
|
||||
#ifndef __hppa__
|
||||
#undef __hppa__
|
||||
#endif
|
||||
#ifndef __hppa64__
|
||||
#undef __hppa64__
|
||||
#endif
|
||||
#ifndef __ia64_ilp32__
|
||||
#undef __ia64_ilp32__
|
||||
#endif
|
||||
#ifndef __ia64__
|
||||
#undef __ia64__
|
||||
#endif
|
||||
#ifndef __loongarch32__
|
||||
#undef __loongarch32__
|
||||
#endif
|
||||
#ifndef __loongarch64__
|
||||
#undef __loongarch64__
|
||||
#endif
|
||||
#ifndef __m68k__
|
||||
#undef __m68k__
|
||||
#endif
|
||||
#ifndef __mips__
|
||||
#undef __mips__
|
||||
#endif
|
||||
#ifndef __mipsn32__
|
||||
#undef __mipsn32__
|
||||
#endif
|
||||
#ifndef __mips64__
|
||||
#undef __mips64__
|
||||
#endif
|
||||
#ifndef __powerpc__
|
||||
#undef __powerpc__
|
||||
#endif
|
||||
#ifndef __powerpc64__
|
||||
#undef __powerpc64__
|
||||
#endif
|
||||
#ifndef __powerpc64_elfv2__
|
||||
#undef __powerpc64_elfv2__
|
||||
#endif
|
||||
#ifndef __riscv32__
|
||||
#undef __riscv32__
|
||||
#endif
|
||||
#ifndef __riscv64__
|
||||
#undef __riscv64__
|
||||
#endif
|
||||
#ifndef __riscv32_ilp32__
|
||||
#undef __riscv32_ilp32__
|
||||
#endif
|
||||
#ifndef __riscv32_ilp32f__
|
||||
#undef __riscv32_ilp32f__
|
||||
#endif
|
||||
#ifndef __riscv32_ilp32d__
|
||||
#undef __riscv32_ilp32d__
|
||||
#endif
|
||||
#ifndef __riscv64_ilp32__
|
||||
#undef __riscv64_ilp32__
|
||||
#endif
|
||||
#ifndef __riscv64_ilp32f__
|
||||
#undef __riscv64_ilp32f__
|
||||
#endif
|
||||
#ifndef __riscv64_ilp32d__
|
||||
#undef __riscv64_ilp32d__
|
||||
#endif
|
||||
#ifndef __riscv64_lp64__
|
||||
#undef __riscv64_lp64__
|
||||
#endif
|
||||
#ifndef __riscv64_lp64f__
|
||||
#undef __riscv64_lp64f__
|
||||
#endif
|
||||
#ifndef __riscv64_lp64d__
|
||||
#undef __riscv64_lp64d__
|
||||
#endif
|
||||
#ifndef __s390__
|
||||
#undef __s390__
|
||||
#endif
|
||||
#ifndef __s390x__
|
||||
#undef __s390x__
|
||||
#endif
|
||||
#ifndef __sh__
|
||||
#undef __sh__
|
||||
#endif
|
||||
#ifndef __sparc__
|
||||
#undef __sparc__
|
||||
#endif
|
||||
#ifndef __sparc64__
|
||||
#undef __sparc64__
|
||||
#endif
|
||||
])
|
||||
|
||||
])
|
||||
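The macro above leans entirely on compile-time probes. As an illustration, a small C sketch of its x86 branch; it mirrors only that branch, other CPUs are handled by separate case arms in the macro.

/* Mirrors the x86_64/x32/i386 discrimination used by gl_HOST_CPU_C_ABI:
 * in configure the probes only need to (fail to) compile, here the result
 * is printed instead. */
#include <stdio.h>

#if defined __x86_64__ || defined __amd64__ || defined _M_X64 || defined _M_AMD64
# if defined __ILP32__ || defined _ILP32
   /* 64-bit instruction set with 32-bit pointers: the x32 ABI */
#  define DETECTED_ABI "x86_64-x32"
# else
#  define DETECTED_ABI "x86_64"
# endif
#else
   /* only valid for x86 hosts; other CPUs use different preprocessor tests */
# define DETECTED_ABI "i386"
#endif

int main(void)
{
	printf("%s\n", DETECTED_ABI);
	return 0;
}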
|
||||
|
||||
dnl Sets the HOST_CPU_C_ABI_32BIT variable to 'yes' if the C language ABI
|
||||
dnl (application binary interface) is a 32-bit one, to 'no' if it is a 64-bit
|
||||
dnl one.
|
||||
dnl This is a simplified variant of gl_HOST_CPU_C_ABI.
|
||||
AC_DEFUN([gl_HOST_CPU_C_ABI_32BIT],
|
||||
[
|
||||
AC_REQUIRE([AC_CANONICAL_HOST])
|
||||
AC_CACHE_CHECK([32-bit host C ABI], [gl_cv_host_cpu_c_abi_32bit],
|
||||
[case "$host_cpu" in
|
||||
|
||||
# CPUs that only support a 32-bit ABI.
|
||||
arc \
|
||||
| bfin \
|
||||
| cris* \
|
||||
| csky \
|
||||
| epiphany \
|
||||
| ft32 \
|
||||
| h8300 \
|
||||
| m68k \
|
||||
| microblaze | microblazeel \
|
||||
| nds32 | nds32le | nds32be \
|
||||
| nios2 | nios2eb | nios2el \
|
||||
| or1k* \
|
||||
| or32 \
|
||||
| sh | sh[1234] | sh[1234]e[lb] \
|
||||
| tic6x \
|
||||
| xtensa* )
|
||||
gl_cv_host_cpu_c_abi_32bit=yes
|
||||
;;
|
||||
|
||||
# CPUs that only support a 64-bit ABI.
|
||||
changequote(,)dnl
|
||||
alpha | alphaev[4-8] | alphaev56 | alphapca5[67] | alphaev6[78] \
|
||||
| mmix )
|
||||
changequote([,])dnl
|
||||
gl_cv_host_cpu_c_abi_32bit=no
|
||||
;;
|
||||
|
||||
*)
|
||||
if test -n "$gl_cv_host_cpu_c_abi"; then
|
||||
dnl gl_HOST_CPU_C_ABI has already been run. Use its result.
|
||||
case "$gl_cv_host_cpu_c_abi" in
|
||||
i386 | x86_64-x32 | arm | armhf | arm64-ilp32 | hppa | ia64-ilp32 | loongarch32 | mips | mipsn32 | powerpc | riscv*-ilp32* | s390 | sparc)
|
||||
gl_cv_host_cpu_c_abi_32bit=yes ;;
|
||||
x86_64 | alpha | arm64 | aarch64c | hppa64 | ia64 | loongarch64 | mips64 | powerpc64 | powerpc64-elfv2 | riscv*-lp64* | s390x | sparc64 )
|
||||
gl_cv_host_cpu_c_abi_32bit=no ;;
|
||||
*)
|
||||
gl_cv_host_cpu_c_abi_32bit=unknown ;;
|
||||
esac
|
||||
else
|
||||
gl_cv_host_cpu_c_abi_32bit=unknown
|
||||
fi
|
||||
if test $gl_cv_host_cpu_c_abi_32bit = unknown; then
|
||||
AC_COMPILE_IFELSE(
|
||||
[AC_LANG_SOURCE(
|
||||
[[int test_pointer_size[sizeof (void *) - 5];
|
||||
]])],
|
||||
[gl_cv_host_cpu_c_abi_32bit=no],
|
||||
[gl_cv_host_cpu_c_abi_32bit=yes])
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
])
|
||||
|
||||
HOST_CPU_C_ABI_32BIT="$gl_cv_host_cpu_c_abi_32bit"
|
||||
])
|
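The fallback at the end of gl_HOST_CPU_C_ABI_32BIT relies on a single array declaration: sizeof (void *) - 5 evaluates to 3 with 64-bit pointers, but wraps around to a huge unsigned value with 32-bit pointers, so the declaration only compiles for 64-bit ABIs. A standalone sketch:

#include <stdio.h>

/* the same declaration the macro tries to compile; fails on 32-bit-pointer
 * targets because the (unsigned) size expression wraps around */
int test_pointer_size[sizeof (void *) - 5];

int main(void)
{
	printf("64-bit ABI: the array has %zu elements\n",
		   sizeof(test_pointer_size) / sizeof(int));
	return 0;
}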
334
m4/macros/lib-prefix.m4
Normal file
@ -0,0 +1,334 @@
|
||||
# lib-prefix.m4
|
||||
# serial 23
|
||||
dnl Copyright (C) 2001-2005, 2008-2025 Free Software Foundation, Inc.
|
||||
dnl This file is free software; the Free Software Foundation
|
||||
dnl gives unlimited permission to copy and/or distribute it,
|
||||
dnl with or without modifications, as long as this notice is preserved.
|
||||
dnl This file is offered as-is, without any warranty.
|
||||
|
||||
dnl From Bruno Haible.
|
||||
|
||||
dnl AC_LIB_PREFIX adds to the CPPFLAGS and LDFLAGS the flags that are needed
|
||||
dnl to access previously installed libraries. The basic assumption is that
|
||||
dnl a user will want packages to use other packages he previously installed
|
||||
dnl with the same --prefix option.
|
||||
dnl This macro is not needed if only AC_LIB_LINKFLAGS is used to locate
|
||||
dnl libraries, but is otherwise very convenient.
|
||||
AC_DEFUN([AC_LIB_PREFIX],
|
||||
[
|
||||
AC_BEFORE([$0], [AC_LIB_LINKFLAGS])
|
||||
AC_REQUIRE([AC_PROG_CC])
|
||||
AC_REQUIRE([AC_CANONICAL_HOST])
|
||||
AC_REQUIRE([AC_LIB_PREPARE_MULTILIB])
|
||||
AC_REQUIRE([AC_LIB_PREPARE_PREFIX])
|
||||
dnl By default, look in $includedir and $libdir.
|
||||
use_additional=yes
|
||||
AC_LIB_WITH_FINAL_PREFIX([
|
||||
eval additional_includedir=\"$includedir\"
|
||||
eval additional_libdir=\"$libdir\"
|
||||
])
|
||||
AC_ARG_WITH([lib-prefix],
|
||||
[[ --with-lib-prefix[=DIR] search for libraries in DIR/include and DIR/lib
|
||||
--without-lib-prefix don't search for libraries in includedir and libdir]],
|
||||
[
|
||||
if test "X$withval" = "Xno"; then
|
||||
use_additional=no
|
||||
else
|
||||
if test "X$withval" = "X"; then
|
||||
AC_LIB_WITH_FINAL_PREFIX([
|
||||
eval additional_includedir=\"$includedir\"
|
||||
eval additional_libdir=\"$libdir\"
|
||||
])
|
||||
else
|
||||
additional_includedir="$withval/include"
|
||||
additional_libdir="$withval/$acl_libdirstem"
|
||||
fi
|
||||
fi
|
||||
])
|
||||
if test $use_additional = yes; then
|
||||
dnl Potentially add $additional_includedir to $CPPFLAGS.
|
||||
dnl But don't add it
|
||||
dnl 1. if it's the standard /usr/include,
|
||||
dnl 2. if it's already present in $CPPFLAGS,
|
||||
dnl 3. if it's /usr/local/include and we are using GCC on Linux,
|
||||
dnl 4. if it doesn't exist as a directory.
|
||||
if test "X$additional_includedir" != "X/usr/include"; then
|
||||
haveit=
|
||||
for x in $CPPFLAGS; do
|
||||
AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"])
|
||||
if test "X$x" = "X-I$additional_includedir"; then
|
||||
haveit=yes
|
||||
break
|
||||
fi
|
||||
done
|
||||
if test -z "$haveit"; then
|
||||
if test "X$additional_includedir" = "X/usr/local/include"; then
|
||||
if test -n "$GCC"; then
|
||||
case $host_os in
|
||||
linux* | gnu* | k*bsd*-gnu) haveit=yes;;
|
||||
esac
|
||||
fi
|
||||
fi
|
||||
if test -z "$haveit"; then
|
||||
if test -d "$additional_includedir"; then
|
||||
dnl Really add $additional_includedir to $CPPFLAGS.
|
||||
CPPFLAGS="${CPPFLAGS}${CPPFLAGS:+ }-I$additional_includedir"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
dnl Potentially add $additional_libdir to $LDFLAGS.
|
||||
dnl But don't add it
|
||||
dnl 1. if it's the standard /usr/lib,
|
||||
dnl 2. if it's already present in $LDFLAGS,
|
||||
dnl 3. if it's /usr/local/lib and we are using GCC on Linux,
|
||||
dnl 4. if it doesn't exist as a directory.
|
||||
if test "X$additional_libdir" != "X/usr/$acl_libdirstem"; then
|
||||
haveit=
|
||||
for x in $LDFLAGS; do
|
||||
AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"])
|
||||
if test "X$x" = "X-L$additional_libdir"; then
|
||||
haveit=yes
|
||||
break
|
||||
fi
|
||||
done
|
||||
if test -z "$haveit"; then
|
||||
if test "X$additional_libdir" = "X/usr/local/$acl_libdirstem"; then
|
||||
if test -n "$GCC"; then
|
||||
case $host_os in
|
||||
linux*) haveit=yes;;
|
||||
esac
|
||||
fi
|
||||
fi
|
||||
if test -z "$haveit"; then
|
||||
if test -d "$additional_libdir"; then
|
||||
dnl Really add $additional_libdir to $LDFLAGS.
|
||||
LDFLAGS="${LDFLAGS}${LDFLAGS:+ }-L$additional_libdir"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
])
|
||||
|
||||
dnl AC_LIB_PREPARE_PREFIX creates variables acl_final_prefix,
|
||||
dnl acl_final_exec_prefix, containing the values to which $prefix and
|
||||
dnl $exec_prefix will expand at the end of the configure script.
|
||||
AC_DEFUN([AC_LIB_PREPARE_PREFIX],
|
||||
[
|
||||
dnl Unfortunately, prefix and exec_prefix get only finally determined
|
||||
dnl at the end of configure.
|
||||
if test "X$prefix" = "XNONE"; then
|
||||
acl_final_prefix="$ac_default_prefix"
|
||||
else
|
||||
acl_final_prefix="$prefix"
|
||||
fi
|
||||
if test "X$exec_prefix" = "XNONE"; then
|
||||
acl_final_exec_prefix='${prefix}'
|
||||
else
|
||||
acl_final_exec_prefix="$exec_prefix"
|
||||
fi
|
||||
acl_saved_prefix="$prefix"
|
||||
prefix="$acl_final_prefix"
|
||||
eval acl_final_exec_prefix=\"$acl_final_exec_prefix\"
|
||||
prefix="$acl_saved_prefix"
|
||||
])
|
||||
|
||||
dnl AC_LIB_WITH_FINAL_PREFIX([statement]) evaluates statement, with the
|
||||
dnl variables prefix and exec_prefix bound to the values they will have
|
||||
dnl at the end of the configure script.
|
||||
AC_DEFUN([AC_LIB_WITH_FINAL_PREFIX],
|
||||
[
|
||||
acl_saved_prefix="$prefix"
|
||||
prefix="$acl_final_prefix"
|
||||
acl_saved_exec_prefix="$exec_prefix"
|
||||
exec_prefix="$acl_final_exec_prefix"
|
||||
$1
|
||||
exec_prefix="$acl_saved_exec_prefix"
|
||||
prefix="$acl_saved_prefix"
|
||||
])
|
||||
|
||||
dnl AC_LIB_PREPARE_MULTILIB creates
|
||||
dnl - a function acl_is_expected_elfclass, that tests whether standard input
|
||||
dnl   has a 32-bit or 64-bit ELF header, depending on the host CPU ABI,
|
||||
dnl - 3 variables acl_libdirstem, acl_libdirstem2, acl_libdirstem3, containing
|
||||
dnl the basename of the libdir to try in turn, either "lib" or "lib64" or
|
||||
dnl "lib/64" or "lib32" or "lib/sparcv9" or "lib/amd64" or similar.
|
||||
AC_DEFUN([AC_LIB_PREPARE_MULTILIB],
|
||||
[
|
||||
dnl There is no formal standard regarding lib, lib32, and lib64.
|
||||
dnl On most glibc systems, the current practice is that on a system supporting
|
||||
dnl 32-bit and 64-bit instruction sets or ABIs, 64-bit libraries go under
|
||||
dnl $prefix/lib64 and 32-bit libraries go under $prefix/lib. However, on
|
||||
dnl Arch Linux based distributions, it's the opposite: 32-bit libraries go
|
||||
dnl under $prefix/lib32 and 64-bit libraries go under $prefix/lib.
|
||||
dnl We determine the compiler's default mode by looking at the compiler's
|
||||
dnl library search path. If at least one of its elements ends in /lib64 or
|
||||
dnl points to a directory whose absolute pathname ends in /lib64, we use that
|
||||
dnl for 64-bit ABIs. Similarly for 32-bit ABIs. Otherwise we use the default,
|
||||
dnl namely "lib".
|
||||
dnl On Solaris systems, the current practice is that on a system supporting
|
||||
dnl 32-bit and 64-bit instruction sets or ABIs, 64-bit libraries go under
|
||||
dnl $prefix/lib/64 (which is a symlink to either $prefix/lib/sparcv9 or
|
||||
dnl $prefix/lib/amd64) and 32-bit libraries go under $prefix/lib.
|
||||
AC_REQUIRE([AC_CANONICAL_HOST])
|
||||
AC_REQUIRE([gl_HOST_CPU_C_ABI_32BIT])
|
||||
|
||||
AC_CACHE_CHECK([for ELF binary format], [gl_cv_elf],
|
||||
[AC_EGREP_CPP([Extensible Linking Format],
|
||||
[#if defined __ELF__ || (defined __linux__ && (defined __EDG__ || defined __SUNPRO_C))
|
||||
Extensible Linking Format
|
||||
#endif
|
||||
],
|
||||
[gl_cv_elf=yes],
|
||||
[gl_cv_elf=no])
|
||||
])
|
||||
if test $gl_cv_elf = yes; then
|
||||
# Extract the ELF class of a file (5th byte) in decimal.
|
||||
# Cf. https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
|
||||
if od -A x < /dev/null >/dev/null 2>/dev/null; then
|
||||
# Use POSIX od.
|
||||
func_elfclass ()
|
||||
{
|
||||
od -A n -t d1 -j 4 -N 1
|
||||
}
|
||||
else
|
||||
# Use BSD hexdump.
|
||||
func_elfclass ()
|
||||
{
|
||||
dd bs=1 count=1 skip=4 2>/dev/null | hexdump -e '1/1 "%3d "'
|
||||
echo
|
||||
}
|
||||
fi
|
||||
# Use 'expr', not 'test', to compare the values of func_elfclass, because on
|
||||
# Solaris 11 OpenIndiana and Solaris 11 OmniOS, the result is 001 or 002,
|
||||
# not 1 or 2.
|
||||
changequote(,)dnl
|
||||
case $HOST_CPU_C_ABI_32BIT in
|
||||
yes)
|
||||
# 32-bit ABI.
|
||||
acl_is_expected_elfclass ()
|
||||
{
|
||||
expr "`func_elfclass | sed -e 's/[ ]//g'`" = 1 > /dev/null
|
||||
}
|
||||
;;
|
||||
no)
|
||||
# 64-bit ABI.
|
||||
acl_is_expected_elfclass ()
|
||||
{
|
||||
expr "`func_elfclass | sed -e 's/[ ]//g'`" = 2 > /dev/null
|
||||
}
|
||||
;;
|
||||
*)
|
||||
# Unknown.
|
||||
acl_is_expected_elfclass ()
|
||||
{
|
||||
:
|
||||
}
|
||||
;;
|
||||
esac
|
||||
changequote([,])dnl
|
||||
else
|
||||
acl_is_expected_elfclass ()
|
||||
{
|
||||
:
|
||||
}
|
||||
fi
|
||||
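func_elfclass above reads the fifth byte of an ELF file (EI_CLASS at offset 4, 1 for 32-bit and 2 for 64-bit objects) with od or hexdump. The same extraction in C, for illustration; no ELF magic validation is done, the input is assumed to be an ELF object.

#include <stdio.h>

int main(int argc, char *argv[])
{
	unsigned char ident[5];
	FILE *f;

	if (argc < 2 || !(f = fopen(argv[1], "rb")))
	{
		return 1;
	}
	if (fread(ident, 1, sizeof(ident), f) == sizeof(ident))
	{
		/* 1 = ELFCLASS32, 2 = ELFCLASS64, as compared by
		 * acl_is_expected_elfclass above */
		printf("%d\n", ident[4]);
	}
	fclose(f);
	return 0;
}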
|
||||
dnl Allow the user to override the result by setting acl_cv_libdirstems.
|
||||
AC_CACHE_CHECK([for the common suffixes of directories in the library search path],
|
||||
[acl_cv_libdirstems],
|
||||
[dnl Try 'lib' first, because that's the default for libdir in GNU, see
|
||||
dnl <https://www.gnu.org/prep/standards/html_node/Directory-Variables.html>.
|
||||
acl_libdirstem=lib
|
||||
acl_libdirstem2=
|
||||
acl_libdirstem3=
|
||||
case "$host_os" in
|
||||
solaris*)
|
||||
dnl See Solaris 10 Software Developer Collection > Solaris 64-bit Developer's Guide > The Development Environment
|
||||
dnl <https://docs.oracle.com/cd/E19253-01/816-5138/dev-env/index.html>.
|
||||
dnl "Portable Makefiles should refer to any library directories using the 64 symbolic link."
|
||||
dnl But we want to recognize the sparcv9 or amd64 subdirectory also if the
|
||||
dnl symlink is missing, so we set acl_libdirstem2 too.
|
||||
if test $HOST_CPU_C_ABI_32BIT = no; then
|
||||
acl_libdirstem2=lib/64
|
||||
case "$host_cpu" in
|
||||
sparc*) acl_libdirstem3=lib/sparcv9 ;;
|
||||
i*86 | x86_64) acl_libdirstem3=lib/amd64 ;;
|
||||
esac
|
||||
fi
|
||||
;;
|
||||
netbsd*)
|
||||
dnl On NetBSD/sparc64, there is a 'sparc' subdirectory that contains
|
||||
dnl 32-bit libraries.
|
||||
if test $HOST_CPU_C_ABI_32BIT != no; then
|
||||
case "$host_cpu" in
|
||||
sparc*) acl_libdirstem2=lib/sparc ;;
|
||||
esac
|
||||
fi
|
||||
;;
|
||||
*)
|
||||
dnl If $CC generates code for a 32-bit ABI, the libraries are
|
||||
dnl surely under $prefix/lib or $prefix/lib32, not $prefix/lib64.
|
||||
dnl Similarly, if $CC generates code for a 64-bit ABI, the libraries
|
||||
dnl are surely under $prefix/lib or $prefix/lib64, not $prefix/lib32.
|
||||
dnl Find the compiler's search path. However, non-system compilers
|
||||
dnl sometimes have odd library search paths. But we can't simply invoke
|
||||
dnl '/usr/bin/gcc -print-search-dirs' because that would not take into
|
||||
dnl account the -m32/-m31 or -m64 options from the $CC or $CFLAGS.
|
||||
searchpath=`(LC_ALL=C $CC $CPPFLAGS $CFLAGS -print-search-dirs) 2>/dev/null \
|
||||
| sed -n -e 's,^libraries: ,,p' | sed -e 's,^=,,'`
|
||||
if test $HOST_CPU_C_ABI_32BIT != no; then
|
||||
# 32-bit or unknown ABI.
|
||||
if test -d /usr/lib32; then
|
||||
acl_libdirstem2=lib32
|
||||
fi
|
||||
fi
|
||||
if test $HOST_CPU_C_ABI_32BIT != yes; then
|
||||
# 64-bit or unknown ABI.
|
||||
if test -d /usr/lib64; then
|
||||
acl_libdirstem3=lib64
|
||||
fi
|
||||
fi
|
||||
if test -n "$searchpath"; then
|
||||
acl_saved_IFS="${IFS= }"; IFS=":"
|
||||
for searchdir in $searchpath; do
|
||||
if test -d "$searchdir"; then
|
||||
case "$searchdir" in
|
||||
*/lib32/ | */lib32 ) acl_libdirstem2=lib32 ;;
|
||||
*/lib64/ | */lib64 ) acl_libdirstem3=lib64 ;;
|
||||
*/../ | */.. )
|
||||
# Better ignore directories of this form. They are misleading.
|
||||
;;
|
||||
*) searchdir=`cd "$searchdir" && pwd`
|
||||
case "$searchdir" in
|
||||
*/lib32 ) acl_libdirstem2=lib32 ;;
|
||||
*/lib64 ) acl_libdirstem3=lib64 ;;
|
||||
esac ;;
|
||||
esac
|
||||
fi
|
||||
done
|
||||
IFS="$acl_saved_IFS"
|
||||
if test $HOST_CPU_C_ABI_32BIT = yes; then
|
||||
# 32-bit ABI.
|
||||
acl_libdirstem3=
|
||||
fi
|
||||
if test $HOST_CPU_C_ABI_32BIT = no; then
|
||||
# 64-bit ABI.
|
||||
acl_libdirstem2=
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
test -n "$acl_libdirstem2" || acl_libdirstem2="$acl_libdirstem"
|
||||
test -n "$acl_libdirstem3" || acl_libdirstem3="$acl_libdirstem"
|
||||
acl_cv_libdirstems="$acl_libdirstem,$acl_libdirstem2,$acl_libdirstem3"
|
||||
])
|
||||
dnl Decompose acl_cv_libdirstems into acl_libdirstem, acl_libdirstem2, and
|
||||
dnl acl_libdirstem3.
|
||||
changequote(,)dnl
|
||||
acl_libdirstem=`echo "$acl_cv_libdirstems" | sed -e 's/,.*//'`
|
||||
acl_libdirstem2=`echo "$acl_cv_libdirstems" | sed -e 's/^[^,]*,//' -e 's/,.*//'`
|
||||
acl_libdirstem3=`echo "$acl_cv_libdirstems" | sed -e 's/^[^,]*,[^,]*,//' -e 's/,.*//'`
|
||||
changequote([,])dnl
|
||||
])
|
1
scripts/.gitignore
vendored
@ -17,3 +17,4 @@ thread_analysis
|
||||
tls_test
|
||||
timeattack
|
||||
os_info
|
||||
nist_kem_kat
|
||||
|
@ -7,7 +7,7 @@ AM_CPPFLAGS = \
|
||||
|
||||
noinst_PROGRAMS = bin2array bin2sql id2sql key2keyid keyid2sql oid2der \
|
||||
thread_analysis dh_speed pubkey_speed crypt_burn hash_burn fetch \
|
||||
dnssec malloc_speed aes-test settings-test timeattack
|
||||
dnssec malloc_speed aes-test settings-test timeattack nist_kem_kat
|
||||
|
||||
if USE_TLS
|
||||
noinst_PROGRAMS += tls_test
|
||||
@ -31,6 +31,7 @@ malloc_speed_SOURCES = malloc_speed.c
|
||||
fetch_SOURCES = fetch.c
|
||||
dnssec_SOURCES = dnssec.c
|
||||
timeattack_SOURCES = timeattack.c
|
||||
nist_kem_kat_SOURCES = nist_kem_kat.c
|
||||
|
||||
id2sql_LDADD = $(top_builddir)/src/libstrongswan/libstrongswan.la
|
||||
key2keyid_LDADD = $(top_builddir)/src/libstrongswan/libstrongswan.la
|
||||
@ -46,6 +47,7 @@ dnssec_LDADD = $(top_builddir)/src/libstrongswan/libstrongswan.la
|
||||
aes_test_LDADD = $(top_builddir)/src/libstrongswan/libstrongswan.la
|
||||
settings_test_LDADD = $(top_builddir)/src/libstrongswan/libstrongswan.la
|
||||
timeattack_LDADD = $(top_builddir)/src/libstrongswan/libstrongswan.la $(RTLIB)
|
||||
nist_kem_kat_LDADD = $(top_builddir)/src/libstrongswan/libstrongswan.la
|
||||
|
||||
if USE_IMCV
|
||||
AM_CPPFLAGS += -I$(top_srcdir)/src/libimcv
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (C) 2023 Tobias Brunner
|
||||
* Copyright (C) 2023-2024 Tobias Brunner
|
||||
* Copyright (C) 2009 Martin Willi
|
||||
*
|
||||
* Copyright (C) secunet Security Networks AG
|
||||
@ -56,13 +56,14 @@ static void run_test(key_exchange_method_t method, int rounds)
|
||||
method);
|
||||
return;
|
||||
}
|
||||
assert(r[0]->get_public_key(r[0], &rpublic[0]));
|
||||
for (round = 1; round < rounds; round++)
|
||||
{
|
||||
r[round] = lib->crypto->create_ke(lib->crypto, method);
|
||||
assert(r[round]->get_public_key(r[round], &rpublic[round]));
|
||||
}
|
||||
|
||||
/* make sure to use the method call order documented in the
|
||||
* key_exchange_t header file */
|
||||
|
||||
printf("%N:\t", key_exchange_method_names, method);
|
||||
|
||||
start_timing(&timing);
|
||||
@ -73,12 +74,14 @@ static void run_test(key_exchange_method_t method, int rounds)
|
||||
}
|
||||
printf("A = g^a/s: %8.1f", rounds / end_timing(&timing));
|
||||
|
||||
start_timing(&timing);
|
||||
for (round = 0; round < rounds; round++)
|
||||
{
|
||||
assert(r[round]->set_public_key(r[round], lpublic[round]));
|
||||
assert(r[round]->get_public_key(r[round], &rpublic[round]));
|
||||
assert(r[round]->get_shared_secret(r[round], &rsecret[round]));
|
||||
chunk_free(&lpublic[round]);
|
||||
}
|
||||
printf(" | S = A^b/s: %8.1f", rounds / end_timing(&timing));
|
||||
|
||||
start_timing(&timing);
|
||||
for (round = 0; round < rounds; round++)
|
||||
@ -93,6 +96,7 @@ static void run_test(key_exchange_method_t method, int rounds)
|
||||
assert(chunk_equals(rsecret[round], lsecret[round]));
|
||||
chunk_free(&lsecret[round]);
|
||||
chunk_free(&rsecret[round]);
|
||||
chunk_free(&lpublic[round]);
|
||||
chunk_free(&rpublic[round]);
|
||||
l[round]->destroy(l[round]);
|
||||
r[round]->destroy(r[round]);
|
||||
|
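The hunk above reorders the benchmark to follow the call order documented for key_exchange_t, where the responder consumes the initiator's public value before producing its own value and shared secret, as KEMs require. A hedged sketch of that order for a single round, compiled within the strongSwan tree like the script above (cleanup and error handling omitted):

#include <library.h>

static void ke_roundtrip(key_exchange_method_t method)
{
	key_exchange_t *l, *r;
	chunk_t lpublic, rpublic, lsecret, rsecret;

	l = lib->crypto->create_ke(lib->crypto, method);
	r = lib->crypto->create_ke(lib->crypto, method);

	l->get_public_key(l, &lpublic);   /* initiator's value first */
	r->set_public_key(r, lpublic);    /* responder consumes it */
	r->get_public_key(r, &rpublic);   /* then produces its own value */
	r->get_shared_secret(r, &rsecret);
	l->set_public_key(l, rpublic);
	l->get_shared_secret(l, &lsecret);
	/* lsecret now equals rsecret; chunks and ke objects must be freed and
	 * destroyed as done in the benchmark above */
}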
189
scripts/nist_kem_kat.c
Normal file
@ -0,0 +1,189 @@
|
||||
/*
|
||||
* Copyright (C) 2019-2020 Andreas Steffen
|
||||
*
|
||||
* Copyright (C) secunet Security Networks AG
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify it
|
||||
* under the terms of the GNU General Public License as published by the
|
||||
* Free Software Foundation; either version 2 of the License, or (at your
|
||||
* option) any later version. See <http://www.fsf.org/copyleft/gpl.txt>.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful, but
|
||||
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
|
||||
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
|
||||
* for more details.
|
||||
*/
|
||||
|
||||
#include <stdio.h>
|
||||
#include <stdlib.h>
|
||||
#include <string.h>
|
||||
#include <unistd.h>
|
||||
#include <getopt.h>
|
||||
#include <errno.h>
|
||||
|
||||
#include <library.h>
|
||||
|
||||
static void usage(FILE *out, char *name)
|
||||
{
|
||||
fprintf(out, "Convert NIST KEM KAT file into struct\n");
|
||||
fprintf(out, "%s [OPTIONS]\n\n", name);
|
||||
fprintf(out, "Options:\n");
|
||||
fprintf(out, " -h, --help print this help.\n");
|
||||
fprintf(out, " -m, --method KEM method.\n");
|
||||
fprintf(out, " -c, --count number of structs (default 4).\n");
|
||||
fprintf(out, " -i, --in=FILE request file (default STDIN).\n");
|
||||
fprintf(out, " -o, --out=FILE response file (default STDOUT).\n");
|
||||
fprintf(out, "\n");
|
||||
}
|
||||
|
||||
int main(int argc, char *argv[])
|
||||
{
|
||||
FILE *in = stdin;
|
||||
FILE *out = stdout;
|
||||
char line[90000], *method = "", *pos, *eol, *param, *value;
|
||||
size_t param_len, value_len;
|
||||
int count = 4, n;
|
||||
|
||||
library_init(NULL, "nist-kem-kat");
|
||||
atexit(library_deinit);
|
||||
|
||||
while (true)
|
||||
{
|
||||
struct option long_opts[] = {
|
||||
{"help", no_argument, NULL, 'h' },
|
||||
{"method", required_argument, NULL, 'm' },
|
||||
{"count", required_argument, NULL, 'c' },
|
||||
{"in", required_argument, NULL, 'i' },
|
||||
{"out", required_argument, NULL, 'o' },
|
||||
{0,0,0,0 },
|
||||
};
|
||||
switch (getopt_long(argc, argv, "h:m:c:i:o:", long_opts, NULL))
|
||||
{
|
||||
case EOF:
|
||||
break;
|
||||
case 'h':
|
||||
usage(stdout, argv[0]);
|
||||
return 0;
|
||||
case 'm':
|
||||
method = optarg;
|
||||
continue;
|
||||
case 'c':
|
||||
count = atoi(optarg);
|
||||
continue;
|
||||
case 'i':
|
||||
in = fopen(optarg, "r");
|
||||
if (!in)
|
||||
{
|
||||
fprintf(stderr, "failed to open '%s': %s\n", optarg,
|
||||
strerror(errno));
|
||||
usage(stderr, argv[0]);
|
||||
return 1;
|
||||
}
|
||||
continue;
|
||||
case 'o':
|
||||
out = fopen(optarg, "w");
|
||||
if (!out)
|
||||
{
|
||||
fprintf(stderr, "failed to open '%s': %s\n", optarg,
|
||||
strerror(errno));
|
||||
usage(stderr, argv[0]);
|
||||
return 1;
|
||||
}
|
||||
continue;
|
||||
default:
|
||||
usage(stderr, argv[0]);
|
||||
return 1;
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
while (fgets(line, sizeof(line), in))
|
||||
{
|
||||
pos = strchr(line, '=');
|
||||
if (!pos)
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
/* remove preceding whitespace from value */
|
||||
value = pos + 1;
|
||||
eol = strchr(value, '\n');
|
||||
if (!eol)
|
||||
{
|
||||
fprintf(stderr, "eol not found\n");
|
||||
break;
|
||||
}
|
||||
value_len = eol - value;
|
||||
while (value_len && *value == ' ')
|
||||
{
|
||||
value++;
|
||||
value_len--;
|
||||
}
|
||||
|
||||
/* remove trailing whitespace from param */
|
||||
param = line;
|
||||
param_len = pos - line;
|
||||
while (param_len && *(--pos) == ' ')
|
||||
{
|
||||
param_len--;
|
||||
}
|
||||
param[param_len] = '\0';
|
||||
|
||||
if (streq(param, "sk"))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
|
||||
if (streq(param, "count"))
|
||||
{
|
||||
if (count == 0)
|
||||
{
|
||||
break;
|
||||
}
|
||||
fprintf(out, "/** count = %.*s */\n", (int)value_len, value);
|
||||
fprintf(out, "{\n");
|
||||
fprintf(out, "\t.method = %s,\n", method);
|
||||
count--;
|
||||
}
|
||||
else
|
||||
{
|
||||
fprintf(out, "\t.%s = chunk_from_chars(", param);
|
||||
n = 0;
|
||||
|
||||
while (value_len > 1)
|
||||
{
|
||||
if (n > 0)
|
||||
{
|
||||
fprintf(out, ",");
|
||||
if (n % 100 == 0)
|
||||
{
|
||||
fprintf(out, " /* %d */\n", n);
|
||||
}
|
||||
}
|
||||
if (n % 10 == 0)
|
||||
{
|
||||
fprintf(out, "\n\t\t");
|
||||
}
|
||||
fprintf(out, "0x%.2s", value);
|
||||
value += 2;
|
||||
value_len -= 2;
|
||||
n++;
|
||||
}
|
||||
fprintf(out, "),\n");
|
||||
if (streq(param, "ss"))
|
||||
{
|
||||
fprintf(out, "},\n");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (in != stdin)
|
||||
{
|
||||
fclose(in);
|
||||
}
|
||||
if (out != stdout)
|
||||
{
|
||||
fclose(out);
|
||||
}
|
||||
return 0;
|
||||
}
|
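For illustration, a hedged sketch of the conversion the tool performs. The input values below are shortened and purely hypothetical, real KAT request files carry full-length pk/ct/ss lines, and ML_KEM_768 is only an assumed --method value.

/* Hypothetical input passed via --in (values shortened for illustration):
 *   count = 0
 *   pk = 00112233
 *   ss = AABBCCDD
 * Output produced with --method ML_KEM_768: */
/** count = 0 */
{
	.method = ML_KEM_768,
	.pk = chunk_from_chars(
		0x00,0x11,0x22,0x33),
	.ss = chunk_from_chars(
		0xAA,0xBB,0xCC,0xDD),
},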
199
scripts/test.sh
@ -4,7 +4,7 @@
|
||||
build_botan()
|
||||
{
|
||||
# same revision used in the build recipe of the testing environment
|
||||
BOTAN_REV=3.3.0
|
||||
BOTAN_REV=3.7.1
|
||||
BOTAN_DIR=$DEPS_BUILD_DIR/botan
|
||||
|
||||
if test -d "$BOTAN_DIR"; then
|
||||
@ -28,8 +28,8 @@ build_botan()
|
||||
git clone https://github.com/randombit/botan.git $BOTAN_DIR &&
|
||||
cd $BOTAN_DIR &&
|
||||
git checkout -qf $BOTAN_REV &&
|
||||
python ./configure.py --amalgamation $BOTAN_CONFIG &&
|
||||
make -j4 libs >/dev/null &&
|
||||
./configure.py --amalgamation $BOTAN_CONFIG &&
|
||||
make -j$(nproc) libs >/dev/null &&
|
||||
sudo make install >/dev/null &&
|
||||
sudo ldconfig || exit $?
|
||||
cd -
|
||||
@ -37,7 +37,7 @@ build_botan()
|
||||
|
||||
build_wolfssl()
|
||||
{
|
||||
WOLFSSL_REV=v5.6.4-stable
|
||||
WOLFSSL_REV=v5.8.2-stable
|
||||
WOLFSSL_DIR=$DEPS_BUILD_DIR/wolfssl
|
||||
|
||||
if test -d "$WOLFSSL_DIR"; then
|
||||
@ -47,21 +47,22 @@ build_wolfssl()
|
||||
echo "$ build_wolfssl()"
|
||||
|
||||
WOLFSSL_CFLAGS="-DWOLFSSL_PUBLIC_MP -DWOLFSSL_DES_ECB -DHAVE_AES_ECB \
|
||||
-DHAVE_ECC_BRAINPOOL -DWOLFSSL_MIN_AUTH_TAG_SZ=8"
|
||||
-DHAVE_ECC_BRAINPOOL -DWOLFSSL_MIN_AUTH_TAG_SZ=8 \
|
||||
-DRSA_MIN_SIZE=1024"
|
||||
WOLFSSL_CONFIG="--prefix=$DEPS_PREFIX
|
||||
--disable-crypttests --disable-examples
|
||||
--enable-aesccm --enable-aesctr --enable-camellia
|
||||
--enable-curve25519 --enable-curve448 --enable-des3
|
||||
--enable-ecccustcurves --enable-ed25519 --enable-ed448
|
||||
--enable-keygen --with-max-rsa-bits=8192 --enable-md4
|
||||
--enable-rsapss --enable-sha3 --enable-shake256"
|
||||
--enable-keygen --enable-mlkem --with-max-rsa-bits=8192
|
||||
--enable-md4 --enable-rsapss --enable-sha3 --enable-shake256"
|
||||
|
||||
git clone https://github.com/wolfSSL/wolfssl.git $WOLFSSL_DIR &&
|
||||
cd $WOLFSSL_DIR &&
|
||||
git checkout -qf $WOLFSSL_REV &&
|
||||
./autogen.sh &&
|
||||
./configure C_EXTRA_FLAGS="$WOLFSSL_CFLAGS" $WOLFSSL_CONFIG &&
|
||||
make -j4 >/dev/null &&
|
||||
make -j$(nproc) >/dev/null &&
|
||||
sudo make install >/dev/null &&
|
||||
sudo ldconfig || exit $?
|
||||
cd -
|
||||
@ -69,7 +70,7 @@ build_wolfssl()
|
||||
|
||||
build_tss2()
|
||||
{
|
||||
TSS2_REV=3.2.2
|
||||
TSS2_REV=3.2.3
|
||||
TSS2_PKG=tpm2-tss-$TSS2_REV
|
||||
TSS2_DIR=$DEPS_BUILD_DIR/$TSS2_PKG
|
||||
TSS2_SRC=https://github.com/tpm2-software/tpm2-tss/releases/download/$TSS2_REV/$TSS2_PKG.tar.gz
|
||||
@ -83,7 +84,7 @@ build_tss2()
|
||||
curl -L $TSS2_SRC | tar xz -C $DEPS_BUILD_DIR &&
|
||||
cd $TSS2_DIR &&
|
||||
./configure --prefix=$DEPS_PREFIX --disable-doxygen-doc &&
|
||||
make -j4 >/dev/null &&
|
||||
make -j$(nproc) >/dev/null &&
|
||||
sudo make install >/dev/null &&
|
||||
sudo ldconfig || exit $?
|
||||
cd -
|
||||
@ -91,26 +92,30 @@ build_tss2()
|
||||
|
||||
build_openssl()
|
||||
{
|
||||
SSL_REV=3.1.1
|
||||
SSL_PKG=openssl-$SSL_REV
|
||||
SSL_DIR=$DEPS_BUILD_DIR/$SSL_PKG
|
||||
SSL_SRC=https://www.openssl.org/source/$SSL_PKG.tar.gz
|
||||
SSL_REV=openssl-3.5.1
|
||||
SSL_DIR=$DEPS_BUILD_DIR/openssl
|
||||
SSL_INS=$DEPS_PREFIX/ssl
|
||||
SSL_OPT="-d shared no-dtls no-ssl3 no-zlib no-idea no-psk no-srp
|
||||
SSL_OPT="-d shared no-dtls no-ssl3 no-zlib no-idea no-psk
|
||||
no-tests enable-rfc3779 enable-ec_nistp_64_gcc_128"
|
||||
|
||||
if test -d "$SSL_DIR"; then
|
||||
return
|
||||
fi
|
||||
|
||||
# insist on compiling with gcc and debug information as symbols are otherwise not found
|
||||
if test "$LEAK_DETECTIVE" = "yes"; then
|
||||
SSL_OPT="$SSL_OPT CC=gcc -d"
|
||||
# insist on compiling with gcc and debug information as symbols are
|
||||
# otherwise not found, but we can disable SRP (see below)
|
||||
SSL_OPT="$SSL_OPT no-srp CC=gcc -d"
|
||||
elif test "$CC" != "clang"; then
|
||||
# when using ASan with clang, llvm-symbolizer is used to resolve symbols
|
||||
# and this tool links libcurl, which in turn requires SRP, so we can
|
||||
# only disable it when not building with clang
|
||||
SSL_OPT="$SSL_OPT no-srp"
|
||||
fi
|
||||
|
||||
echo "$ build_openssl()"
|
||||
|
||||
curl -L $SSL_SRC | tar xz -C $DEPS_BUILD_DIR || exit $?
|
||||
git clone https://github.com/openssl/openssl.git --depth 1 -b $SSL_REV $SSL_DIR || exit $?
|
||||
|
||||
if [ "$TEST" = "android" ]; then
|
||||
OPENSSL_SRC=${SSL_DIR} \
|
||||
@ -118,7 +123,7 @@ build_openssl()
|
||||
else
|
||||
cd $SSL_DIR &&
|
||||
./config --prefix=$SSL_INS --openssldir=$SSL_INS --libdir=lib $SSL_OPT &&
|
||||
make -j4 >/dev/null &&
|
||||
make -j$(nproc) >/dev/null &&
|
||||
sudo make install_sw >/dev/null &&
|
||||
sudo ldconfig || exit $?
|
||||
cd -
|
||||
@ -127,7 +132,7 @@ build_openssl()
|
||||
|
||||
build_awslc()
|
||||
{
|
||||
LC_REV=1.22.0
|
||||
LC_REV=1.55.0
|
||||
LC_PKG=aws-lc-$LC_REV
|
||||
LC_DIR=$DEPS_BUILD_DIR/$LC_PKG
|
||||
LC_SRC=https://github.com/aws/aws-lc/archive/refs/tags/v${LC_REV}.tar.gz
|
||||
@ -172,7 +177,7 @@ system_uses_openssl3()
|
||||
|
||||
prepare_system_openssl()
|
||||
{
|
||||
# On systems that ship OpenSSL 3 (e.g. Ubuntu 22.04), we require debug
|
||||
# On systems that ship OpenSSL 3 (e.g. Ubuntu 22.04+), we require debug
|
||||
# symbols to whitelist leaks
|
||||
if test "$1" = "deps"; then
|
||||
echo "deb http://ddebs.ubuntu.com $(lsb_release -cs) main restricted
|
||||
@ -180,19 +185,24 @@ prepare_system_openssl()
|
||||
deb http://ddebs.ubuntu.com $(lsb_release -cs)-proposed main restricted" | \
|
||||
sudo tee -a /etc/apt/sources.list.d/ddebs.list
|
||||
sudo apt-get install -qq ubuntu-dbgsym-keyring
|
||||
DEPS="$DEPS libssl3-dbgsym"
|
||||
if [ "$ID" = "ubuntu" -a "$VERSION_ID" = "24.04" ]; then
|
||||
DEPS="$DEPS libssl3t64-dbgsym"
|
||||
else
|
||||
DEPS="$DEPS libssl3-dbgsym"
|
||||
fi
|
||||
fi
|
||||
if test "$LEAK_DETECTIVE" = "yes"; then
|
||||
# make sure we can properly whitelist functions with leak detective
|
||||
DEPS="$DEPS binutils-dev"
|
||||
CONFIG="$CONFIG --enable-bfd-backtraces"
|
||||
else
|
||||
elif [ "$ID" = "ubuntu" -a "$VERSION_ID" != "24.04" ]; then
|
||||
# with ASan we have to use the (extremely) slow stack unwind as the
|
||||
# shipped version of the library is built with -fomit-frame-pointer
|
||||
export ASAN_OPTIONS=fast_unwind_on_malloc=0
|
||||
fi
|
||||
}
|
||||
|
||||
: ${SRC_DIR=$PWD}
|
||||
: ${BUILD_DIR=$PWD}
|
||||
: ${DEPS_BUILD_DIR=$BUILD_DIR/..}
|
||||
: ${DEPS_PREFIX=/usr/local}
|
||||
@ -213,10 +223,13 @@ case "$TEST" in
|
||||
default)
|
||||
# should be the default, but lets make sure
|
||||
CONFIG="--with-printf-hooks=glibc"
|
||||
if system_uses_openssl3; then
|
||||
prepare_system_openssl $1
|
||||
fi
|
||||
;;
|
||||
openssl*)
|
||||
CONFIG="--disable-defaults --enable-pki --enable-openssl --enable-pem"
|
||||
export TESTS_PLUGINS="test-vectors openssl! pem"
|
||||
CONFIG="--disable-defaults --enable-pki --enable-openssl --enable-pem --enable-drbg"
|
||||
export TESTS_PLUGINS="test-vectors openssl! pem drbg"
|
||||
DEPS="libssl-dev"
|
||||
if test "$TEST" = "openssl-3"; then
|
||||
DEPS=""
|
||||
@ -226,6 +239,9 @@ openssl*)
|
||||
use_custom_openssl $1
|
||||
elif system_uses_openssl3; then
|
||||
prepare_system_openssl $1
|
||||
else
|
||||
# the kdf plugin is necessary to build against older OpenSSL versions
|
||||
TESTS_PLUGINS="$TESTS_PLUGINS kdf"
|
||||
fi
|
||||
;;
|
||||
gcrypt)
|
||||
@ -234,16 +250,16 @@ gcrypt)
|
||||
DEPS="libgcrypt20-dev"
|
||||
;;
|
||||
botan)
|
||||
CONFIG="--disable-defaults --enable-pki --enable-botan --enable-pem --enable-hmac --enable-x509 --enable-constraints"
|
||||
export TESTS_PLUGINS="test-vectors botan! pem hmac x509 constraints"
|
||||
CONFIG="--disable-defaults --enable-pki --enable-botan --enable-pem --enable-hmac --enable-x509 --enable-constraints --enable-drbg"
|
||||
export TESTS_PLUGINS="test-vectors botan! pem hmac x509 constraints drbg"
|
||||
DEPS=""
|
||||
if test "$1" = "build-deps"; then
|
||||
build_botan
|
||||
fi
|
||||
;;
|
||||
wolfssl)
|
||||
CONFIG="--disable-defaults --enable-pki --enable-wolfssl --enable-pem --enable-pkcs1 --enable-pkcs8 --enable-x509 --enable-constraints"
|
||||
export TESTS_PLUGINS="test-vectors wolfssl! pem pkcs1 pkcs8 x509 constraints"
|
||||
CONFIG="--disable-defaults --enable-pki --enable-wolfssl --enable-pem --enable-pkcs1 --enable-pkcs8 --enable-x509 --enable-constraints --enable-drbg"
|
||||
export TESTS_PLUGINS="test-vectors wolfssl! pem pkcs1 pkcs8 x509 constraints drbg"
|
||||
# build with custom options to enable all the features the plugin supports
|
||||
DEPS=""
|
||||
if test "$1" = "build-deps"; then
|
||||
@ -252,15 +268,11 @@ wolfssl)
|
||||
;;
|
||||
printf-builtin)
|
||||
CONFIG="--with-printf-hooks=builtin"
|
||||
;;
|
||||
all|codeql|coverage|sonarcloud|no-dbg)
|
||||
if [ "$TEST" = "sonarcloud" ]; then
|
||||
if [ -z "$SONAR_PROJECT" -o -z "$SONAR_ORGANIZATION" -o -z "$SONAR_TOKEN" ]; then
|
||||
echo "The SONAR_PROJECT, SONAR_ORGANIZATION and SONAR_TOKEN" \
|
||||
"environment variables are required to run this test"
|
||||
exit 1
|
||||
fi
|
||||
if system_uses_openssl3; then
|
||||
prepare_system_openssl $1
|
||||
fi
|
||||
;;
|
||||
all|alpine|codeql|coverage|sonarcloud|no-dbg|no-testable-ke)
|
||||
if [ "$TEST" = "codeql" ]; then
|
||||
# don't run tests, only analyze built code
|
||||
TARGET=
|
||||
@ -271,33 +283,50 @@ all|codeql|coverage|sonarcloud|no-dbg)
|
||||
CONFIG="--enable-all --disable-android-dns --disable-android-log
|
||||
--disable-kernel-pfroute --disable-keychain
|
||||
--disable-lock-profiler --disable-padlock --disable-fuzzing
|
||||
--disable-osx-attr --disable-tkm --disable-uci
|
||||
--disable-osx-attr --disable-tkm
|
||||
--disable-unwind-backtraces
|
||||
--disable-svc --disable-dbghelp-backtraces --disable-socket-win
|
||||
--disable-kernel-wfp --disable-kernel-iph --disable-winhttp
|
||||
--disable-python-eggs-install"
|
||||
--disable-kernel-wfp --disable-kernel-iph --disable-winhttp"
|
||||
# not enabled on the build server
|
||||
CONFIG="$CONFIG --disable-af-alg"
|
||||
# unable to build Botan on Ubuntu 20.04
|
||||
if [ "$ID" = "ubuntu" -a "$VERSION_ID" = "20.04" ]; then
|
||||
CONFIG="$CONFIG --disable-botan"
|
||||
fi
|
||||
if test "$TEST" != "coverage"; then
|
||||
CONFIG="$CONFIG --disable-coverage"
|
||||
else
|
||||
# not actually required but configure checks for it
|
||||
DEPS="$DEPS lcov"
|
||||
TARGET="coverage"
|
||||
fi
|
||||
DEPS="$DEPS libcurl4-gnutls-dev libsoup2.4-dev libunbound-dev libldns-dev
|
||||
if [ "$TEST" = "no-testable-ke" ]; then
|
||||
CONFIG="$CONFIG --without-testable-ke"
|
||||
fi
|
||||
DEPS="$DEPS libcurl4-gnutls-dev libsoup-3.0-dev libunbound-dev libldns-dev
|
||||
libmysqlclient-dev libsqlite3-dev clearsilver-dev libfcgi-dev
|
||||
libldap2-dev libpcsclite-dev libpam0g-dev binutils-dev libnm-dev
|
||||
libgcrypt20-dev libjson-c-dev python3-pip libtspi-dev libsystemd-dev
|
||||
libselinux1-dev libiptc-dev"
|
||||
PYDEPS="tox"
|
||||
libgcrypt20-dev libjson-c-dev libtspi-dev libsystemd-dev
|
||||
libselinux1-dev libiptc-dev ruby-rubygems python3-build tox"
|
||||
if [ "$ID" = "ubuntu" -a "$VERSION_ID" = "22.04" -a "$1" = "build-deps" ]; then
# python3-build is broken on 22.04 with venv (https://bugs.launchpad.net/ubuntu/+source/python-build/+bug/1992108)
# while installing python3-virtualenv should help, it doesn't: even
# after uninstalling python3-venv, build still prefers the latter
sudo python3 -m pip install --upgrade build
fi
|
||||
if [ "$TEST" = "alpine" ]; then
|
||||
# override the whole list for alpine
|
||||
DEPS="git gmp-dev openldap-dev curl-dev ldns-dev unbound-dev libsoup3-dev
|
||||
libxml2-dev tpm2-tss-dev tpm2-tss-sys mariadb-dev wolfssl-dev
|
||||
libgcrypt-dev botan3-dev pcsc-lite-dev networkmanager-dev
|
||||
linux-pam-dev iptables-dev libselinux-dev binutils-dev libunwind-dev
|
||||
ruby py3-setuptools py3-build py3-tox"
|
||||
# musl does not provide backtrace(), so use libunwind
|
||||
CONFIG="$CONFIG --enable-unwind-backtraces"
|
||||
# alpine doesn't have systemd
|
||||
CONFIG="$CONFIG --disable-systemd --disable-cert-enroll-timer"
|
||||
# no TrouSerS either
|
||||
CONFIG="$CONFIG --disable-tss-trousers --disable-aikgen"
|
||||
# and no Clearsilver
|
||||
CONFIG="$CONFIG --disable-fast --disable-manager --disable-medsrv"
|
||||
fi
|
||||
if test "$1" = "build-deps"; then
|
||||
if [ "$ID" = "ubuntu" -a "$VERSION_ID" != "20.04" ]; then
|
||||
build_botan
|
||||
fi
|
||||
build_botan
|
||||
build_wolfssl
|
||||
build_tss2
|
||||
fi
|
||||
@ -320,13 +349,6 @@ win*)
|
||||
TARGET=
|
||||
else
|
||||
CONFIG="$CONFIG --enable-openssl"
|
||||
case "$IMG" in
|
||||
2015|2017)
|
||||
# old OpenSSL versions don't provide HKDF
|
||||
CONFIG="$CONFIG --enable-kdf"
|
||||
;;
|
||||
esac
|
||||
|
||||
CFLAGS="$CFLAGS -I$OPENSSL_DIR/include"
|
||||
LDFLAGS="-L$OPENSSL_DIR/lib"
|
||||
case "$IMG" in
|
||||
@ -372,10 +394,9 @@ macos)
|
||||
--enable-socket-default --enable-sshkey --enable-stroke
|
||||
--enable-swanctl --enable-unity --enable-updown
|
||||
--enable-x509 --enable-xauth-generic"
|
||||
DEPS="automake autoconf libtool bison gettext gperf pkg-config openssl@1.1 curl"
|
||||
DEPS="automake autoconf libtool bison gperf pkgconf openssl@1.1 curl"
|
||||
BREW_PREFIX=$(brew --prefix)
|
||||
export PATH=$BREW_PREFIX/opt/bison/bin:$PATH
|
||||
export ACLOCAL_PATH=$BREW_PREFIX/opt/gettext/share/aclocal:$ACLOCAL_PATH
|
||||
for pkg in openssl@1.1 curl
|
||||
do
|
||||
PKG_CONFIG_PATH=$BREW_PREFIX/opt/$pkg/lib/pkgconfig:$PKG_CONFIG_PATH
|
||||
@ -429,7 +450,11 @@ fuzzing)
|
||||
;;
|
||||
nm)
|
||||
DEPS="gnome-common libsecret-1-dev libgtk-3-dev libnm-dev libnma-dev"
|
||||
cd src/frontends/gnome
|
||||
ORIG_SRC_DIR="$SRC_DIR"
|
||||
SRC_DIR="$ORIG_SRC_DIR/src/frontends/gnome"
|
||||
if [ "$ORIG_SRC_DIR" = "$BUILD_DIR" ]; then
|
||||
BUILD_DIR="$SRC_DIR"
|
||||
fi
|
||||
# don't run ./configure with ./autogen.sh
|
||||
export NOCONFIGURE=1
|
||||
;;
|
||||
@ -451,8 +476,12 @@ case "$1" in
|
||||
deps)
|
||||
case "$OS_NAME" in
|
||||
linux)
|
||||
sudo apt-get update -qq && \
|
||||
sudo apt-get install -qq bison flex gperf gettext $DEPS
|
||||
sudo apt-get update -y && \
|
||||
sudo apt-get install -y automake autoconf libtool pkgconf bison flex gperf $DEPS
|
||||
;;
|
||||
alpine)
|
||||
apk add --no-cache build-base automake autoconf libtool pkgconfig && \
|
||||
apk add --no-cache bison flex gperf tzdata $DEPS
|
||||
;;
|
||||
macos)
|
||||
brew update && \
|
||||
@ -460,15 +489,11 @@ deps)
|
||||
;;
|
||||
freebsd)
|
||||
pkg install -y automake autoconf libtool pkgconf && \
|
||||
pkg install -y bison flex gperf gettext $DEPS
|
||||
pkg install -y bison flex gperf $DEPS
|
||||
;;
|
||||
esac
|
||||
exit $?
|
||||
;;
|
||||
pydeps)
|
||||
test -z "$PYDEPS" || pip3 -q install --user $PYDEPS
|
||||
exit $?
|
||||
;;
|
||||
build-deps)
|
||||
exit
|
||||
;;
|
||||
@ -484,21 +509,28 @@ CONFIG="$CONFIG
|
||||
--enable-leak-detective=${LEAK_DETECTIVE-no}"
|
||||
|
||||
case "$TEST" in
|
||||
codeql|coverage|freebsd|fuzzing|sonarcloud|win*)
|
||||
alpine|codeql|coverage|freebsd|fuzzing|sonarcloud|win*)
|
||||
# don't use AddressSanitizer if it's not available or causes conflicts
|
||||
CONFIG="$CONFIG --disable-asan"
|
||||
;;
|
||||
*)
|
||||
if [ "$LEAK_DETECTIVE" != "yes" ]; then
|
||||
CONFIG="$CONFIG --enable-asan"
|
||||
else
|
||||
CONFIG="$CONFIG --disable-asan"
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
|
||||
echo "$ ./autogen.sh"
|
||||
./autogen.sh || exit $?
|
||||
cd $SRC_DIR
|
||||
if [ ! -f ./configure ]; then
|
||||
echo "$ ./autogen.sh"
|
||||
./autogen.sh || exit $?
|
||||
fi
|
||||
|
||||
cd $BUILD_DIR
|
||||
echo "$ CC=$CC CFLAGS=\"$CFLAGS\" ./configure $CONFIG"
|
||||
CC="$CC" CFLAGS="$CFLAGS" ./configure $CONFIG || exit $?
|
||||
CC="$CC" CFLAGS="$CFLAGS" $SRC_DIR/configure $CONFIG || exit $?
|
||||
|
||||
case "$TEST" in
|
||||
apidoc)
|
||||
@ -513,10 +545,10 @@ case "$TEST" in
|
||||
sonarcloud)
|
||||
# without target, coverage is currently not supported anyway because
|
||||
# sonarqube only supports gcov, not lcov
|
||||
build-wrapper-linux-x86-64 --out-dir bw-output make -j4 || exit $?
|
||||
build-wrapper-linux-x86-64 --out-dir $BUILD_WRAPPER_OUT_DIR make -j$(nproc) || exit $?
|
||||
;;
|
||||
*)
|
||||
make -j4 $TARGET || exit $?
|
||||
make -j$(nproc) $TARGET || exit $?
|
||||
;;
|
||||
esac
|
||||
|
||||
@ -528,23 +560,9 @@ apidoc)
|
||||
fi
|
||||
rm make.warnings
|
||||
;;
|
||||
sonarcloud)
|
||||
sonar-scanner \
|
||||
-Dsonar.host.url=https://sonarcloud.io \
|
||||
-Dsonar.projectKey=${SONAR_PROJECT} \
|
||||
-Dsonar.organization=${SONAR_ORGANIZATION} \
|
||||
-Dsonar.token=${SONAR_TOKEN} \
|
||||
-Dsonar.projectVersion=$(git describe --exclude 'android-*')+${BUILD_NUMBER} \
|
||||
-Dsonar.sources=. \
|
||||
-Dsonar.cfamily.threads=2 \
|
||||
-Dsonar.cfamily.analysisCache.mode=fs \
|
||||
-Dsonar.cfamily.analysisCache.path=$HOME/.sonar-cache \
|
||||
-Dsonar.cfamily.build-wrapper-output=bw-output || exit $?
|
||||
rm -r bw-output .scannerwork
|
||||
;;
|
||||
android)
|
||||
rm -r strongswan-*
|
||||
cd src/frontends/android
|
||||
cd $SRC_DIR/src/frontends/android
|
||||
echo "$ ./gradlew build"
|
||||
NDK_CCACHE=ccache ./gradlew build --info || exit $?
|
||||
;;
|
||||
@ -552,6 +570,7 @@ android)
|
||||
;;
|
||||
esac
|
||||
|
||||
cd $SRC_DIR
|
||||
# ensure there are no unignored build artifacts (or other changes) in the Git repo
|
||||
unclean="$(git status --porcelain)"
|
||||
if test -n "$unclean"; then
|
||||
|
@ -1,3 +1,5 @@
|
||||
sonar.sources=.
|
||||
|
||||
# exclude these files completely
|
||||
sonar.exclusions=\
|
||||
src/manager/templates/static/jquery.js, \
|
||||
@ -29,16 +31,25 @@ sonar.issue.ignore.allfile.a2.fileRegexp=made by GNU Bison
|
||||
sonar.issue.ignore.allfile.a3.fileRegexp=produced by gperf
|
||||
|
||||
# ignore some rules
|
||||
sonar.issue.ignore.multicriteria=m1,m2,m3,m4,m5,m6
|
||||
sonar.issue.ignore.multicriteria=m1,m2,m3,m4,m5,m6,m7
|
||||
# Multiple variables should not be declared on the same line
|
||||
sonar.issue.ignore.multicriteria.m1.ruleKey=c:S1659
|
||||
sonar.issue.ignore.multicriteria.m1.resourceKey=**/*
|
||||
# Functions should not be defined with a variable number of arguments
|
||||
sonar.issue.ignore.multicriteria.m2.ruleKey=c:S923
|
||||
sonar.issue.ignore.multicriteria.m2.resourceKey=**/*
|
||||
# Function names should be used either as a call with a parameter list or with the "&" operator
|
||||
sonar.issue.ignore.multicriteria.m3.ruleKey=c:S936
|
||||
sonar.issue.ignore.multicriteria.m3.resourceKey=**/*
|
||||
# Unused function parameters should be removed
|
||||
sonar.issue.ignore.multicriteria.m4.ruleKey=c:S1172
|
||||
sonar.issue.ignore.multicriteria.m4.resourceKey=**/*
|
||||
# Single line comments should start with "--"
|
||||
sonar.issue.ignore.multicriteria.m5.ruleKey=plsql:SingleLineCommentsSyntaxCheck
|
||||
sonar.issue.ignore.multicriteria.m5.resourceKey=**/*
|
||||
# User-defined types should not be passed as variadic arguments
|
||||
sonar.issue.ignore.multicriteria.m6.ruleKey=c:S5270
|
||||
sonar.issue.ignore.multicriteria.m6.resourceKey=**/*
|
||||
# Loop variables should be declared in the minimal possible scope
|
||||
sonar.issue.ignore.multicriteria.m7.ruleKey=c:S5955
|
||||
sonar.issue.ignore.multicriteria.m7.resourceKey=**/*
|
||||
|
@ -28,6 +28,8 @@ cert_install_availabledir = $(sysconfdir)/cert-enroll.d/cert-install-available
|
||||
cert_install_available_DATA = \
|
||||
cert-install-ssl \
|
||||
cert-install-sssd \
|
||||
cert-install-ldaputils \
|
||||
cert-install-cockpit \
|
||||
cert-install-dirsrv \
|
||||
cert-install-lighttpd \
|
||||
cert-install-openxpki \
|
||||
@ -41,9 +43,10 @@ cert-install-ipsec : cert-install-ipsec.in
|
||||
|
||||
EXTRA_DIST = \
|
||||
cert-enroll.conf cert-enroll.in cert-enroll.service.in cert-enroll.timer \
|
||||
cert-install-dirsrv cert-install-gitea cert-install-ipsec.in \
|
||||
cert-install-lighttpd cert-install-openxpki cert-install-ssl \
|
||||
cert-install-sssd cert-install-swanctl.in
|
||||
cert-install-cockpit cert-install-dirsrv cert-install-gitea \
|
||||
cert-install-ipsec.in cert-install-ldaputils cert-install-lighttpd \
|
||||
cert-install-openxpki cert-install-ssl cert-install-sssd \
|
||||
cert-install-swanctl.in
|
||||
|
||||
man8_MANS = cert-enroll.8
|
||||
|
||||
|
@ -22,8 +22,10 @@
|
||||
: ${CAOUT=cacert}
|
||||
: ${ROOTCA=$CAOUT.pem}
|
||||
: ${OLDROOTCA=$CAOUT-old.pem}
|
||||
: ${OLDERROOTCA=$CAOUT-older.pem}
|
||||
: ${SUBCA=$CAOUT-1.pem}
|
||||
: ${OLDSUBCA=$CAOUT-1-old.pem}
|
||||
: ${OLDERSUBCA=$CAOUT-1-older.pem}
|
||||
: ${RAOUT=racert}
|
||||
: ${RACERT=$RAOUT.pem}
|
||||
|
||||
|
@ -155,15 +155,23 @@ function check_ca_certs()
|
||||
if [ $ROOTCA_CHANGED -ne 0 ]
|
||||
then
|
||||
echo "Warning: '$ROOTCA' has changed"
|
||||
if [ -s old/$ROOTCA ]
|
||||
then
|
||||
mv old/$ROOTCA older
|
||||
fi
|
||||
mv $ROOTCA old
|
||||
mv new/$ROOTCA .
|
||||
fi
|
||||
|
||||
SUBCA_CHANGED=0
|
||||
cmp -s $SUBCA new/$SUBCA || SUBCA_CHANGE=$?
|
||||
cmp -s $SUBCA new/$SUBCA || SUBCA_CHANGED=$?
|
||||
if [ $SUBCA_CHANGED -ne 0 ]
|
||||
then
|
||||
echo "Warning: '$SUBCA' has changed"
|
||||
if [ -s old/$SUBCA ]
|
||||
then
|
||||
mv old/$SUBCA older
|
||||
fi
|
||||
mv $SUBCA old
|
||||
mv new/$SUBCA .
|
||||
fi
|
||||
@ -192,6 +200,7 @@ function install_certs()
|
||||
KEYTYPE="$KEYTYPE" CERTDIR="$CERTDIR" HOSTKEY="$HOSTKEY" \
|
||||
HOSTCERT="$HOSTCERT" ROOTCA="$ROOTCA" SUBCA="$SUBCA" \
|
||||
OLDROOTCA="$OLDROOTCA" OLDSUBCA="$OLDSUBCA" \
|
||||
OLDERROOTCA="$OLDERROOTCA" OLDERSUBCA="$OLDERSUBCA" \
|
||||
USER_GROUP="$USER_GROUP" SERVICE="$SERVICE" \
|
||||
/bin/bash $script || status=$?
|
||||
if [ $status -ne 0 ]
|
||||
@ -251,7 +260,7 @@ esac
|
||||
##############################################################################
|
||||
# Create and change into certificates directory
|
||||
#
|
||||
mkdir -p $CERTDIR/new $CERTDIR/old
|
||||
mkdir -p $CERTDIR/new $CERTDIR/old $CERTDIR/older
|
||||
cd $CERTDIR
|
||||
echo " changed into the '$CERTDIR' directory"
|
||||
|
||||
|
src/cert-enroll/cert-install-cockpit (new file, 50 lines)
@ -0,0 +1,50 @@
|
||||
#!/bin/bash
|
||||
# Install the generated key and certificate as TLS credentials for the Cockpit
|
||||
# management interface.
|
||||
#
|
||||
# Copyright (C) 2024 Andreas Steffen
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
#
|
||||
set -e
|
||||
|
||||
##############################################################################
|
||||
# Set local paths
|
||||
#
|
||||
|
||||
# Path to the cockpit credentials
|
||||
COCKPIT="/etc/cockpit/ws-certs.d"
|
||||
|
||||
##############################################################################
|
||||
# Change into the certificates directory
|
||||
#
|
||||
cd $CERTDIR
|
||||
|
||||
##############################################################################
|
||||
# Install the private key and certificate
|
||||
#
|
||||
cp $HOSTKEY $COCKPIT/ldap.key
|
||||
cp $HOSTCERT $COCKPIT/ldap.crt
|
||||
|
||||
##############################################################################
|
||||
# Restart the cockpit systemd service
|
||||
#
|
||||
/usr/bin/systemctl restart cockpit.service
|
||||
exit 0
|
||||
|
@ -88,6 +88,16 @@ then
|
||||
$CERTUTIL -d . -A -t "CT,," -n "Old Sub CA" -i $CERTDIR/old/$SUBCA \
|
||||
-f passwd.txt
|
||||
fi
|
||||
if [ -s $CERTDIR/older/$ROOTCA ]
|
||||
then
|
||||
$CERTUTIL -d . -A -t "CT,," -n "Older Root CA" -i $CERTDIR/older/$ROOTCA \
|
||||
-f passwd.txt
|
||||
fi
|
||||
if [ -s $CERTDIR/older/$SUBCA ]
|
||||
then
|
||||
$CERTUTIL -d . -A -t "CT,," -n "Older Sub CA" -i $CERTDIR/older/$SUBCA \
|
||||
-f passwd.txt
|
||||
fi
|
||||
|
||||
##############################################################################
|
||||
# Move the generated credentials to the correct place and delete the build dir
|
||||
|
@ -53,6 +53,22 @@ cp $HOSTCERT $IPSECDIR/certs
|
||||
# Install the CA certificates
|
||||
#
|
||||
cp $ROOTCA $SUBCA $IPSECDIR/cacerts
|
||||
if [ -s old/$ROOTCA ]
|
||||
then
|
||||
cp old/$ROOTCA $IPSECDIR/cacerts/$OLDROOTCA
|
||||
fi
|
||||
if [ -s old/$SUBCA ]
|
||||
then
|
||||
cp old/$SUBCA $IPSECDIR/cacerts/$OLDSUBCA
|
||||
fi
|
||||
if [ -s older/$ROOTCA ]
|
||||
then
|
||||
cp older/$ROOTCA $IPSECDIR/cacerts/$OLDERROOTCA
|
||||
fi
|
||||
if [ -s older/$SUBCA ]
|
||||
then
|
||||
cp older/$SUBCA $IPSECDIR/cacerts/$OLDERSUBCA
|
||||
fi
|
||||
|
||||
##############################################################################
|
||||
# Reload the strongSwan charon daemon if it is running
|
||||
|
src/cert-enroll/cert-install-ldaputils (new file, 64 lines)
@ -0,0 +1,64 @@
|
||||
#!/bin/bash
|
||||
# Concatenate the present and past CA certificates into a single TLS_CACERT
|
||||
# file defined by ldap.conf so that the ldap-utils can verify the LDAP server
|
||||
# certificate.
|
||||
#
|
||||
# Copyright (C) 2024 Andreas Steffen
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in
|
||||
# all copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
|
||||
# THE SOFTWARE.
|
||||
#
|
||||
set -e
|
||||
|
||||
##############################################################################
|
||||
# Set some local paths
|
||||
#
|
||||
|
||||
# Path to the LDAP configuration file
|
||||
LDAP_CONF="/etc/ldap/ldap.conf"
|
||||
|
||||
# Extract or set the path to the LDAP TLS CA certificate file
LDAP_TLS_CACERTS=$(awk '/TLS_CACERT/ {print $2}' $LDAP_CONF)
|
||||
|
||||
##############################################################################
|
||||
# Change into the certificate directory
|
||||
#
|
||||
cd $CERTDIR
|
||||
|
||||
##############################################################################
|
||||
# Concatenate the CA certificates into a single file
|
||||
#
|
||||
cat $ROOTCA $SUBCA > $LDAP_TLS_CACERTS
|
||||
if [ -s old/$ROOTCA ]
|
||||
then
|
||||
cat old/$ROOTCA >> $LDAP_TLS_CACERTS
|
||||
fi
|
||||
if [ -s old/$SUBCA ]
|
||||
then
|
||||
cat old/$SUBCA >> $LDAP_TLS_CACERTS
|
||||
fi
|
||||
if [ -s older/$ROOTCA ]
|
||||
then
|
||||
cat older/$ROOTCA >> $LDAP_TLS_CACERTS
|
||||
fi
|
||||
if [ -s older/$SUBCA ]
|
||||
then
|
||||
cat older/$SUBCA >> $LDAP_TLS_CACERTS
|
||||
fi
|
||||
|
||||
exit 0
|
@ -54,6 +54,14 @@ if [ -s old/$SUBCA ]
|
||||
then
|
||||
cp old/$SUBCA $OPENXPKI_TLS/chain/$OLDSUBCA
|
||||
fi
|
||||
if [ -s older/$ROOTCA ]
|
||||
then
|
||||
cp older/$ROOTCA $OPENXPKI_TLS/chain/$OLDERROOTCA
|
||||
fi
|
||||
if [ -s older/$SUBCA ]
|
||||
then
|
||||
cp older/$SUBCA $OPENXPKI_TLS/chain/$OLDERSUBCA
|
||||
fi
|
||||
|
||||
rm -f $OPENXPKI_TLS/chain/*.0
|
||||
|
||||
|
@ -49,6 +49,14 @@ if [ -s $CERTDIR/old/$SUBCA ]
|
||||
then
|
||||
cat $CERTDIR/old/$SUBCA >> $SSL_DIR/trusted.pem
|
||||
fi
|
||||
if [ -s $CERTDIR/older/$ROOTCA ]
|
||||
then
|
||||
cat $CERTDIR/older/$ROOTCA >> $SSL_DIR/trusted.pem
|
||||
fi
|
||||
if [ -s $CERTDIR/older/$SUBCA ]
|
||||
then
|
||||
cat $CERTDIR/older/$SUBCA >> $SSL_DIR/trusted.pem
|
||||
fi
|
||||
|
||||
##############################################################################
|
||||
# Restart the systemd service if it is active
|
||||
|
@ -53,6 +53,14 @@ if [ -s old/$SUBCA ]
|
||||
then
|
||||
cp old/$SUBCA $LDAP_TLS_CACERTDIR/$OLDSUBCA
|
||||
fi
|
||||
if [ -s older/$ROOTCA ]
|
||||
then
|
||||
cp older/$ROOTCA $LDAP_TLS_CACERTDIR/$OLDERROOTCA
|
||||
fi
|
||||
if [ -s older/$SUBCA ]
|
||||
then
|
||||
cp older/$SUBCA $LDAP_TLS_CACERTDIR/$OLDERSUBCA
|
||||
fi
|
||||
|
||||
rm -f $LDAP_TLS_CACERTDIR/*.0
|
||||
|
||||
|
@ -69,6 +69,14 @@ if [ -s old/$SUBCA ]
|
||||
then
|
||||
cp old/$SUBCA ${SWANCTLDIR}/x509ca/$OLDSUBCA
|
||||
fi
|
||||
if [ -s older/$ROOTCA ]
|
||||
then
|
||||
cp older/$ROOTCA ${SWANCTLDIR}/x509ca/$OLDERROOTCA
|
||||
fi
|
||||
if [ -s older/$SUBCA ]
|
||||
then
|
||||
cp older/$SUBCA ${SWANCTLDIR}/x509ca/$OLDERSUBCA
|
||||
fi
|
||||
|
||||
##############################################################################
|
||||
# Reload the strongswan systemd service if it is running
|
||||
|
@ -170,7 +170,7 @@ static peer_cfg_t* create_peer_cfg(private_cmd_connection_t *this)
|
||||
case PROF_V1_XAUTH_AM:
|
||||
case PROF_V1_XAUTH_PSK_AM:
|
||||
case PROF_V1_HYBRID_AM:
|
||||
peer.aggressive = TRUE;
|
||||
peer.options |= OPT_IKEV1_AGGRESSIVE;
|
||||
/* FALL */
|
||||
case PROF_V1_PUB:
|
||||
case PROF_V1_XAUTH:
|
||||
@ -585,7 +585,7 @@ cmd_connection_t *cmd_connection_create()
|
||||
lib->processor->queue_job(lib->processor,
|
||||
(job_t*)callback_job_create_with_prio(
|
||||
(callback_job_cb_t)initiate, this, NULL,
|
||||
(callback_job_cancel_t)return_false, JOB_PRIO_CRITICAL));
|
||||
callback_job_cancel_thread, JOB_PRIO_CRITICAL));
|
||||
|
||||
return &this->public;
|
||||
}
|
||||
|
@ -205,11 +205,29 @@ int main(int argc, char *argv[])
|
||||
/* install routes via XFRM interfaces, if we can use them */
|
||||
lib->settings->set_default_str(lib->settings,
|
||||
"charon-nm.plugins.kernel-netlink.install_routes_xfrmi", "yes");
|
||||
/* bypass IKE traffic from these routes in case traffic selectors conflict */
|
||||
/* use a separate routing table to avoid conflicts with regular charon */
|
||||
lib->settings->set_default_str(lib->settings,
|
||||
"charon-nm.plugins.socket-default.fwmark", "220");
|
||||
"charon-nm.routing_table", "210");
|
||||
/* use the same value as priority (higher than charon's default) */
|
||||
lib->settings->set_default_str(lib->settings,
|
||||
"charon-nm.plugins.kernel-netlink.fwmark", "!220");
|
||||
"charon-nm.routing_table_prio", "210");
|
||||
/* bypass IKE/ESP from these routes in case traffic selectors conflict */
|
||||
lib->settings->set_default_str(lib->settings,
|
||||
"charon-nm.plugins.socket-default.fwmark", "210");
|
||||
lib->settings->set_default_str(lib->settings,
|
||||
"charon-nm.plugins.kernel-netlink.fwmark", "!210");
|
||||
|
||||
/* trigger a DPD to verify the current path is working */
|
||||
lib->settings->set_default_str(lib->settings,
|
||||
"charon-nm.check_current_path", "yes");
|
||||
|
||||
/* fail more quickly so users don't have to wait too long for a new SA */
|
||||
lib->settings->set_default_str(lib->settings,
|
||||
"charon-nm.retransmit_tries", "3");
|
||||
lib->settings->set_default_str(lib->settings,
|
||||
"charon-nm.retransmit_timeout", "2.0");
|
||||
lib->settings->set_default_str(lib->settings,
|
||||
"charon-nm.retransmit_base", "1.4");
|
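These calls only install compiled-in defaults, so every value above can still be overridden in strongswan.conf. As a minimal, illustrative sketch (not part of this change), a component would read such a value back through the same settings API:

/* "210" is merely the default installed above; strongswan.conf wins */
char *table = lib->settings->get_str(lib->settings,
	"charon-nm.routing_table", "210");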
||||
|
||||
DBG1(DBG_DMN, "Starting charon NetworkManager backend (strongSwan "VERSION")");
|
||||
if (lib->integrity)
|
||||
|
@ -78,7 +78,8 @@ static job_requeue_t run(nm_backend_t *this)
|
||||
/**
|
||||
* Cancel the GLib Main Event Loop
|
||||
*/
|
||||
static bool cancel(nm_backend_t *this)
|
||||
CALLBACK(cancel, bool,
|
||||
nm_backend_t *this)
|
||||
{
|
||||
if (this->loop)
|
||||
{
|
||||
@ -152,7 +153,7 @@ static bool nm_backend_init()
|
||||
|
||||
lib->processor->queue_job(lib->processor,
|
||||
(job_t*)callback_job_create_with_prio((callback_job_cb_t)run, this,
|
||||
NULL, (callback_job_cancel_t)cancel, JOB_PRIO_CRITICAL));
|
||||
NULL, cancel, JOB_PRIO_CRITICAL));
|
||||
return TRUE;
|
||||
}
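A note on the change above, hedged since the macro itself is not shown in this diff: the CALLBACK() macro from libstrongswan declares the handler so that its typed argument is also accepted through the generic void*-based callback signature, which is why the (callback_job_cancel_t) cast on cancel can be dropped. A hypothetical handler following the same pattern:

CALLBACK(cleanup, bool,
	my_task_t *task)
{
	/* my_task_t and this handler are made up for illustration only */
	return task != NULL;
}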
|
||||
|
||||
|
@ -195,7 +195,7 @@ nm_handler_t *nm_handler_create()
|
||||
.public = {
|
||||
.handler = {
|
||||
.handle = _handle,
|
||||
.release = nop,
|
||||
.release = (void*)nop,
|
||||
.create_attribute_enumerator = _create_attribute_enumerator,
|
||||
},
|
||||
.create_enumerator = _create_enumerator,
|
||||
|
@ -214,6 +214,10 @@ static void signal_ip_config(NMVpnServicePlugin *plugin,
|
||||
|
||||
handler = priv->handler;
|
||||
|
||||
/* we can reconnect automatically if interfaces change */
|
||||
g_variant_builder_add (&builder, "{sv}", NM_VPN_PLUGIN_CAN_PERSIST,
|
||||
g_variant_new_boolean (TRUE));
|
||||
|
||||
/* NM apparently requires to know the gateway (it uses it to install a
|
||||
* direct route via physical interface if conflicting routes are passed) */
|
||||
other = ike_sa->get_other_host(ike_sa);
|
||||
@ -674,6 +678,51 @@ static bool add_auth_cfg_pw(NMStrongswanPluginPrivate *priv,
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
/**
* Add traffic selectors to the given config, optionally parse them from a
* semicolon-separated list.
*/
static bool add_traffic_selectors(child_cfg_t *child_cfg, bool local,
const char *list, GError **err)
|
||||
{
|
||||
enumerator_t *enumerator;
|
||||
traffic_selector_t *ts;
|
||||
char *token;
|
||||
|
||||
if (list && strlen(list))
|
||||
{
|
||||
enumerator = enumerator_create_token(list, ";", "");
|
||||
while (enumerator->enumerate(enumerator, &token))
|
||||
{
|
||||
ts = traffic_selector_create_from_cidr(token, 0, 0, 65535);
|
||||
if (!ts)
|
||||
{
|
||||
g_set_error(err, NM_VPN_PLUGIN_ERROR,
|
||||
NM_VPN_PLUGIN_ERROR_LAUNCH_FAILED,
|
||||
"Invalid %s traffic selector '%s'.",
|
||||
local ? "local" : "remote", token);
|
||||
enumerator->destroy(enumerator);
|
||||
return FALSE;
|
||||
}
|
||||
child_cfg->add_traffic_selector(child_cfg, local, ts);
|
||||
}
|
||||
enumerator->destroy(enumerator);
|
||||
}
|
||||
else if (local)
|
||||
{
|
||||
ts = traffic_selector_create_dynamic(0, 0, 65535);
|
||||
child_cfg->add_traffic_selector(child_cfg, TRUE, ts);
|
||||
}
|
||||
else
|
||||
{
|
||||
ts = traffic_selector_create_from_cidr("0.0.0.0/0", 0, 0, 65535);
|
||||
child_cfg->add_traffic_selector(child_cfg, FALSE, ts);
|
||||
ts = traffic_selector_create_from_cidr("::/0", 0, 0, 65535);
|
||||
child_cfg->add_traffic_selector(child_cfg, FALSE, ts);
|
||||
}
|
||||
return TRUE;
|
||||
}
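A brief usage sketch of the helper above (the CIDR list is invented for illustration; child_cfg, peer_cfg and err are the variables used in connect_() below):

/* parse a semicolon-separated CIDR list into local traffic selectors;
 * a NULL/empty remote list falls back to 0.0.0.0/0 and ::/0 */
if (!add_traffic_selectors(child_cfg, TRUE, "10.0.1.0/24;192.168.2.0/24", err) ||
	!add_traffic_selectors(child_cfg, FALSE, NULL, err))
{
	child_cfg->destroy(child_cfg);
	peer_cfg->destroy(peer_cfg);
	return FALSE;
}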
|
||||
|
||||
/**
|
||||
* Connect function called from NM via DBUS
|
||||
*/
|
||||
@ -692,7 +741,6 @@ static gboolean connect_(NMVpnServicePlugin *plugin, NMConnection *connection,
|
||||
ike_cfg_t *ike_cfg;
|
||||
peer_cfg_t *peer_cfg;
|
||||
child_cfg_t *child_cfg;
|
||||
traffic_selector_t *ts;
|
||||
ike_sa_t *ike_sa;
|
||||
auth_cfg_t *auth;
|
||||
certificate_t *cert = NULL;
|
||||
@ -912,10 +960,9 @@ static gboolean connect_(NMVpnServicePlugin *plugin, NMConnection *connection,
|
||||
if (priv->xfrmi_id)
|
||||
{ /* set the same mark as for IKE packets on the ESP packets so no routing
|
||||
* loop is created if the TS covers the VPN server's IP */
|
||||
child.set_mark_out = (mark_t){
|
||||
.value = 220,
|
||||
.mask = 0xffffffff,
|
||||
};
|
||||
mark_from_string(lib->settings->get_str(lib->settings,
|
||||
"charon-nm.plugins.socket-default.fwmark", NULL),
|
||||
MARK_OP_NONE, &child.set_mark_out);
|
||||
child.if_id_in = child.if_id_out = priv->xfrmi_id;
|
||||
}
|
||||
|
||||
@ -946,36 +993,22 @@ static gboolean connect_(NMVpnServicePlugin *plugin, NMConnection *connection,
|
||||
child_cfg->add_proposal(child_cfg, proposal_create_default_aead(PROTO_ESP));
|
||||
child_cfg->add_proposal(child_cfg, proposal_create_default(PROTO_ESP));
|
||||
}
|
||||
ts = traffic_selector_create_dynamic(0, 0, 65535);
|
||||
child_cfg->add_traffic_selector(child_cfg, TRUE, ts);
|
||||
|
||||
str = nm_setting_vpn_get_data_item(vpn, "local-ts");
|
||||
if (!add_traffic_selectors(child_cfg, TRUE, str, err))
|
||||
{
|
||||
child_cfg->destroy(child_cfg);
|
||||
peer_cfg->destroy(peer_cfg);
|
||||
return FALSE;
|
||||
}
|
||||
str = nm_setting_vpn_get_data_item(vpn, "remote-ts");
|
||||
if (str && strlen(str))
|
||||
if (!add_traffic_selectors(child_cfg, FALSE, str, err))
|
||||
{
|
||||
enumerator = enumerator_create_token(str, ";", "");
|
||||
while (enumerator->enumerate(enumerator, &str))
|
||||
{
|
||||
ts = traffic_selector_create_from_cidr((char*)str, 0, 0, 65535);
|
||||
if (!ts)
|
||||
{
|
||||
g_set_error(err, NM_VPN_PLUGIN_ERROR,
|
||||
NM_VPN_PLUGIN_ERROR_LAUNCH_FAILED,
|
||||
"Invalid remote traffic selector.");
|
||||
enumerator->destroy(enumerator);
|
||||
child_cfg->destroy(child_cfg);
|
||||
peer_cfg->destroy(peer_cfg);
|
||||
return FALSE;
|
||||
}
|
||||
child_cfg->add_traffic_selector(child_cfg, FALSE, ts);
|
||||
}
|
||||
enumerator->destroy(enumerator);
|
||||
}
|
||||
else
|
||||
{
|
||||
ts = traffic_selector_create_from_cidr("0.0.0.0/0", 0, 0, 65535);
|
||||
child_cfg->add_traffic_selector(child_cfg, FALSE, ts);
|
||||
ts = traffic_selector_create_from_cidr("::/0", 0, 0, 65535);
|
||||
child_cfg->add_traffic_selector(child_cfg, FALSE, ts);
|
||||
child_cfg->destroy(child_cfg);
|
||||
peer_cfg->destroy(peer_cfg);
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
peer_cfg->add_child_cfg(peer_cfg, child_cfg);
|
||||
|
||||
/**
|
||||
|
@ -37,7 +37,7 @@
|
||||
|
||||
#include "tkm.h"
|
||||
#include "tkm_nonceg.h"
|
||||
#include "tkm_diffie_hellman.h"
|
||||
#include "tkm_key_exchange.h"
|
||||
#include "tkm_keymat.h"
|
||||
#include "tkm_listener.h"
|
||||
#include "tkm_kernel_ipsec.h"
|
||||
@ -318,9 +318,9 @@ int main(int argc, char *argv[])
|
||||
lib->plugins->add_static_features(lib->plugins, "tkm-backend", features,
|
||||
countof(features), TRUE, NULL, NULL);
|
||||
|
||||
if (!register_dh_mapping())
|
||||
if (!register_ke_mapping())
|
||||
{
|
||||
DBG1(DBG_DMN, "no DH group mapping defined - aborting %s", dmn_name);
|
||||
DBG1(DBG_DMN, "no KE group mapping defined - aborting %s", dmn_name);
|
||||
goto deinit;
|
||||
}
|
||||
|
||||
@ -410,7 +410,7 @@ int main(int argc, char *argv[])
|
||||
lib->encoding->remove_encoder(lib->encoding, tkm_encoder_encode);
|
||||
|
||||
deinit:
|
||||
destroy_dh_mapping();
|
||||
destroy_ke_mapping();
|
||||
destroy_ca_mapping();
|
||||
libcharon_deinit();
|
||||
tkm_deinit();
|
||||
|
@ -25,7 +25,9 @@
|
||||
|
||||
void charon_esa_acquire(result_type *res, const sp_id_type sp_id)
|
||||
{
|
||||
kernel_acquire_data_t data = {};
|
||||
kernel_acquire_data_t data = {
|
||||
.cpu = CPU_ID_MAX,
|
||||
};
|
||||
|
||||
DBG1(DBG_KNL, "ees: acquire received for reqid %u", sp_id);
|
||||
charon->kernel->acquire(charon->kernel, sp_id, &data);
|
||||
|
@ -83,9 +83,10 @@ bool tkm_init()
|
||||
}
|
||||
|
||||
/* get limits from tkm */
|
||||
if (ike_tkm_limits(&max_requests, &limits[TKM_CTX_NONCE], &limits[TKM_CTX_DH],
|
||||
if (ike_tkm_limits(&max_requests, &limits[TKM_CTX_NONCE], &limits[TKM_CTX_KE],
|
||||
&limits[TKM_CTX_CC], &limits[TKM_CTX_AE],
|
||||
&limits[TKM_CTX_ISA], &limits[TKM_CTX_ESA]) != TKM_OK)
|
||||
&limits[TKM_CTX_ISA], &limits[TKM_CTX_ESA],
|
||||
&limits[TKM_CTX_BLOB]) != TKM_OK)
|
||||
{
|
||||
ees_server_finalize();
|
||||
tkmlib_final();
|
||||
|
@ -20,14 +20,15 @@
|
||||
#include <utils/debug.h>
|
||||
#include <threading/rwlock.h>
|
||||
|
||||
ENUM_BEGIN(tkm_context_kind_names, TKM_CTX_NONCE, TKM_CTX_ESA,
|
||||
ENUM_BEGIN(tkm_context_kind_names, TKM_CTX_NONCE, TKM_CTX_BLOB,
|
||||
"NONCE_CONTEXT",
|
||||
"DH_CONTEXT",
|
||||
"CC_CONTEXT",
|
||||
"ISA_CONTEXT",
|
||||
"AE_CONTEXT",
|
||||
"ESA_CONTEXT");
|
||||
ENUM_END(tkm_context_kind_names, TKM_CTX_ESA);
|
||||
"ESA_CONTEXT",
|
||||
"BLOB_CONTEXT");
|
||||
ENUM_END(tkm_context_kind_names, TKM_CTX_BLOB);
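The new blob context kind is used to pass variable-length public values between charon-tkm and the TKM. A minimal sketch of the acquire/use/release pattern, mirroring its use in tkm_key_exchange.c later in this changeset (error handling abridged):

blob_id_type blob_id;

blob_id = tkm->idmgr->acquire_id(tkm->idmgr, TKM_CTX_BLOB);
if (blob_id)
{
	/* hand the blob ID to a TKM operation such as ike_ke_get()/ike_ke_set(),
	 * then release it once the data has been transferred */
	tkm->idmgr->release_id(tkm->idmgr, TKM_CTX_BLOB, blob_id);
}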
|
||||
|
||||
typedef struct private_tkm_id_manager_t private_tkm_id_manager_t;
|
||||
|
||||
|
@ -34,8 +34,8 @@ typedef enum tkm_context_kind_t tkm_context_kind_t;
|
||||
enum tkm_context_kind_t {
|
||||
/** Nonce context */
|
||||
TKM_CTX_NONCE,
|
||||
/** Diffie-Hellman context */
|
||||
TKM_CTX_DH,
|
||||
/** Key Exchange context */
|
||||
TKM_CTX_KE,
|
||||
/** Certificate chain context */
|
||||
TKM_CTX_CC,
|
||||
/** IKE SA context */
|
||||
@ -44,6 +44,8 @@ enum tkm_context_kind_t {
|
||||
TKM_CTX_AE,
|
||||
/** ESP SA context */
|
||||
TKM_CTX_ESA,
|
||||
/** Blob context */
|
||||
TKM_CTX_BLOB,
|
||||
|
||||
/** helper to determine the number of elements in this enum */
|
||||
TKM_CTX_MAX,
|
||||
|
@ -93,6 +93,7 @@ METHOD(kernel_ipsec_t, add_sa, status_t,
|
||||
kernel_ipsec_add_sa_t *data)
|
||||
{
|
||||
esa_info_t esa;
|
||||
esa_flags_type flags;
|
||||
esp_spi_type spi_loc, spi_rem;
|
||||
host_t *local, *peer;
|
||||
chunk_t *nonce_loc, *nonce_rem;
|
||||
@ -107,31 +108,31 @@ METHOD(kernel_ipsec_t, add_sa, status_t,
|
||||
}
|
||||
esa = *(esa_info_t *)(data->enc_key.ptr);
|
||||
|
||||
/* only handle the case where we have both distinct ESP spi's available */
|
||||
if (esa.spi_r == id->spi)
|
||||
/* only handle the case where we have both distinct ESP SPIs available,
|
||||
* which is always the outbound SA */
|
||||
if (esa.spi_l == id->spi)
|
||||
{
|
||||
chunk_free(&esa.nonce_i);
|
||||
chunk_free(&esa.nonce_r);
|
||||
return SUCCESS;
|
||||
}
|
||||
|
||||
spi_loc = esa.spi_l;
|
||||
spi_rem = id->spi;
|
||||
local = id->src;
|
||||
peer = id->dst;
|
||||
|
||||
if (data->initiator)
|
||||
{
|
||||
spi_loc = id->spi;
|
||||
spi_rem = esa.spi_r;
|
||||
local = id->dst;
|
||||
peer = id->src;
|
||||
nonce_loc = &esa.nonce_i;
|
||||
nonce_rem = &esa.nonce_r;
|
||||
flags = TKM_ESA_INITIATOR;
|
||||
}
|
||||
else
|
||||
{
|
||||
spi_loc = esa.spi_r;
|
||||
spi_rem = id->spi;
|
||||
local = id->src;
|
||||
peer = id->dst;
|
||||
nonce_loc = &esa.nonce_r;
|
||||
nonce_rem = &esa.nonce_i;
|
||||
flags = 0;
|
||||
}
|
||||
|
||||
esa_id = tkm->idmgr->acquire_id(tkm->idmgr, TKM_CTX_ESA);
|
||||
@ -150,24 +151,24 @@ METHOD(kernel_ipsec_t, add_sa, status_t,
|
||||
|
||||
/*
|
||||
* creation of first CHILD SA:
|
||||
* no nonce and no dh contexts because the ones from the IKE SA are re-used
|
||||
* no nonce and no ke contexts because the ones from the IKE SA are re-used
|
||||
*/
|
||||
nonce_loc_id = tkm->chunk_map->get_id(tkm->chunk_map, nonce_loc);
|
||||
if (nonce_loc_id == 0 && esa.dh_id == 0)
|
||||
if (nonce_loc_id == 0 && esa.ke_ids.size == 0)
|
||||
{
|
||||
if (ike_esa_create_first(esa_id, esa.isa_id, data->reqid, 1, spi_loc,
|
||||
spi_rem) != TKM_OK)
|
||||
if (ike_esa_create_first(esa_id, esa.isa_id, data->reqid, 1, flags,
|
||||
spi_loc, spi_rem) != TKM_OK)
|
||||
{
|
||||
DBG1(DBG_KNL, "child SA (%llu, first) creation failed", esa_id);
|
||||
goto failure;
|
||||
}
|
||||
}
|
||||
/* creation of child SA without PFS: no dh context */
|
||||
else if (nonce_loc_id != 0 && esa.dh_id == 0)
|
||||
/* creation of child SA without PFS: no ke context */
|
||||
else if (nonce_loc_id != 0 && esa.ke_ids.size == 0)
|
||||
{
|
||||
chunk_to_sequence(nonce_rem, &nc_rem, sizeof(nonce_type));
|
||||
if (ike_esa_create_no_pfs(esa_id, esa.isa_id, data->reqid, 1,
|
||||
nonce_loc_id, nc_rem, data->initiator,
|
||||
nonce_loc_id, nc_rem, flags,
|
||||
spi_loc, spi_rem) != TKM_OK)
|
||||
{
|
||||
DBG1(DBG_KNL, "child SA (%llu, no PFS) creation failed", esa_id);
|
||||
@ -176,12 +177,12 @@ METHOD(kernel_ipsec_t, add_sa, status_t,
|
||||
tkm->chunk_map->remove(tkm->chunk_map, nonce_loc);
|
||||
tkm->idmgr->release_id(tkm->idmgr, TKM_CTX_NONCE, nonce_loc_id);
|
||||
}
|
||||
/* creation of subsequent child SA with PFS: nonce and dh context are set */
|
||||
/* creation of subsequent child SA with PFS: nonce and ke context are set */
|
||||
else
|
||||
{
|
||||
chunk_to_sequence(nonce_rem, &nc_rem, sizeof(nonce_type));
|
||||
if (ike_esa_create(esa_id, esa.isa_id, data->reqid, 1, esa.dh_id,
|
||||
nonce_loc_id, nc_rem, data->initiator, spi_loc,
|
||||
if (ike_esa_create(esa_id, esa.isa_id, data->reqid, 1, esa.ke_ids,
|
||||
nonce_loc_id, nc_rem, flags, spi_loc,
|
||||
spi_rem) != TKM_OK)
|
||||
{
|
||||
DBG1(DBG_KNL, "child SA (%llu) creation failed", esa_id);
|
||||
|
@ -20,92 +20,110 @@
|
||||
|
||||
#include "tkm.h"
|
||||
#include "tkm_utils.h"
|
||||
#include "tkm_diffie_hellman.h"
|
||||
#include "tkm_key_exchange.h"
|
||||
|
||||
#include <daemon.h>
|
||||
#include <collections/hashtable.h>
|
||||
|
||||
typedef struct private_tkm_diffie_hellman_t private_tkm_diffie_hellman_t;
|
||||
typedef struct private_tkm_key_exchange_t private_tkm_key_exchange_t;
|
||||
|
||||
static hashtable_t *group_map = NULL;
|
||||
static hashtable_t *method_map = NULL;
|
||||
|
||||
/**
|
||||
* Private data of a tkm_diffie_hellman_t object.
|
||||
* Private data of a tkm_key_exchange_t object.
|
||||
*/
|
||||
struct private_tkm_diffie_hellman_t {
|
||||
struct private_tkm_key_exchange_t {
|
||||
|
||||
/**
|
||||
* Public tkm_diffie_hellman_t interface.
|
||||
* Public tkm_key_exchange_t interface.
|
||||
*/
|
||||
tkm_diffie_hellman_t public;
|
||||
tkm_key_exchange_t public;
|
||||
|
||||
/**
|
||||
* Diffie-Hellman group number.
|
||||
* Key exchange method identifier.
|
||||
*/
|
||||
key_exchange_method_t group;
|
||||
key_exchange_method_t method;
|
||||
|
||||
/**
|
||||
* Diffie-Hellman public value.
|
||||
* Key exchange algorithm ID corresponding to method.
|
||||
*/
|
||||
dh_pubvalue_type pubvalue;
|
||||
uint64_t kea_id;
|
||||
|
||||
/**
|
||||
* Context id.
|
||||
*/
|
||||
dh_id_type context_id;
|
||||
ke_id_type context_id;
|
||||
|
||||
};
|
||||
|
||||
METHOD(key_exchange_t, get_public_key, bool,
|
||||
private_tkm_diffie_hellman_t *this, chunk_t *value)
|
||||
private_tkm_key_exchange_t *this, chunk_t *value)
|
||||
{
|
||||
sequence_to_chunk(this->pubvalue.data, this->pubvalue.size, value);
|
||||
return TRUE;
|
||||
blob_id_type pubvalue_id;
|
||||
blob_length_type pubvalue_length;
|
||||
bool ret = FALSE;
|
||||
|
||||
pubvalue_id = tkm->idmgr->acquire_id(tkm->idmgr, TKM_CTX_BLOB);
|
||||
if (pubvalue_id)
|
||||
{
|
||||
ret = ike_ke_get(this->context_id, this->kea_id, pubvalue_id,
|
||||
&pubvalue_length) == TKM_OK &&
|
||||
blob_to_chunk(pubvalue_id, pubvalue_length, value);
|
||||
|
||||
tkm->idmgr->release_id(tkm->idmgr, TKM_CTX_BLOB, pubvalue_id);
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
METHOD(key_exchange_t, get_shared_secret, bool,
|
||||
private_tkm_diffie_hellman_t *this, chunk_t *secret)
|
||||
private_tkm_key_exchange_t *this, chunk_t *secret)
|
||||
{
|
||||
*secret = chunk_empty;
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
METHOD(key_exchange_t, set_public_key, bool,
|
||||
private_tkm_diffie_hellman_t *this, chunk_t value)
|
||||
private_tkm_key_exchange_t *this, chunk_t value)
|
||||
{
|
||||
dh_pubvalue_type othervalue;
|
||||
blob_id_type pubvalue_id;
|
||||
bool ret = FALSE;
|
||||
|
||||
if (!key_exchange_verify_pubkey(this->group, value) ||
|
||||
value.len > sizeof(othervalue.data))
|
||||
if (!key_exchange_verify_pubkey(this->method, value))
|
||||
{
|
||||
return FALSE;
|
||||
}
|
||||
othervalue.size = value.len;
|
||||
memcpy(&othervalue.data, value.ptr, value.len);
|
||||
|
||||
return ike_dh_generate_key(this->context_id, othervalue) == TKM_OK;
|
||||
pubvalue_id = tkm->idmgr->acquire_id(tkm->idmgr, TKM_CTX_BLOB);
|
||||
if (pubvalue_id)
|
||||
{
|
||||
ret = chunk_to_blob(pubvalue_id, &value) &&
|
||||
ike_ke_set(this->context_id, this->kea_id, pubvalue_id) == TKM_OK;
|
||||
|
||||
tkm->idmgr->release_id(tkm->idmgr, TKM_CTX_BLOB, pubvalue_id);
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
METHOD(key_exchange_t, get_method, key_exchange_method_t,
|
||||
private_tkm_diffie_hellman_t *this)
|
||||
private_tkm_key_exchange_t *this)
|
||||
{
|
||||
return this->group;
|
||||
return this->method;
|
||||
}
|
||||
|
||||
METHOD(key_exchange_t, destroy, void,
|
||||
private_tkm_diffie_hellman_t *this)
|
||||
private_tkm_key_exchange_t *this)
|
||||
{
|
||||
if (ike_dh_reset(this->context_id) != TKM_OK)
|
||||
if (ike_ke_reset(this->context_id) != TKM_OK)
|
||||
{
|
||||
DBG1(DBG_LIB, "failed to reset DH context %d", this->context_id);
|
||||
DBG1(DBG_LIB, "failed to reset KE context %d", this->context_id);
|
||||
}
|
||||
|
||||
tkm->idmgr->release_id(tkm->idmgr, TKM_CTX_DH, this->context_id);
|
||||
tkm->idmgr->release_id(tkm->idmgr, TKM_CTX_KE, this->context_id);
|
||||
free(this);
|
||||
}
|
||||
|
||||
METHOD(tkm_diffie_hellman_t, get_id, dh_id_type,
|
||||
private_tkm_diffie_hellman_t *this)
|
||||
METHOD(tkm_key_exchange_t, get_id, ke_id_type,
|
||||
private_tkm_key_exchange_t *this)
|
||||
{
|
||||
return this->context_id;
|
||||
}
|
||||
@ -124,7 +142,7 @@ static bool equals(void *key, void *other_key)
|
||||
/*
|
||||
* Described in header.
|
||||
*/
|
||||
int register_dh_mapping()
|
||||
int register_ke_mapping()
|
||||
{
|
||||
int count, i;
|
||||
char *iana_id_str, *tkm_id_str;
|
||||
@ -137,7 +155,7 @@ int register_dh_mapping()
|
||||
(hashtable_equals_t)equals, 16);
|
||||
|
||||
enumerator = lib->settings->create_key_value_enumerator(lib->settings,
|
||||
"%s.dh_mapping",
|
||||
"%s.ke_mapping",
|
||||
lib->ns);
|
||||
|
||||
while (enumerator->enumerate(enumerator, &iana_id_str, &tkm_id_str))
|
||||
@ -153,7 +171,7 @@ int register_dh_mapping()
|
||||
|
||||
count = map->get_count(map);
|
||||
plugin_feature_t f[count + 1];
|
||||
f[0] = PLUGIN_REGISTER(KE, tkm_diffie_hellman_create);
|
||||
f[0] = PLUGIN_REGISTER(KE, tkm_key_exchange_create);
|
||||
|
||||
i = 1;
|
||||
enumerator = map->create_enumerator(map);
|
||||
@ -164,12 +182,12 @@ int register_dh_mapping()
|
||||
}
|
||||
enumerator->destroy(enumerator);
|
||||
|
||||
lib->plugins->add_static_features(lib->plugins, "tkm-dh", f, countof(f),
|
||||
lib->plugins->add_static_features(lib->plugins, "tkm-ke", f, countof(f),
|
||||
TRUE, NULL, NULL);
|
||||
|
||||
if (count > 0)
|
||||
{
|
||||
group_map = map;
|
||||
method_map = map;
|
||||
}
|
||||
else
|
||||
{
|
||||
@ -182,32 +200,33 @@ int register_dh_mapping()
|
||||
/*
|
||||
* Described in header.
|
||||
*/
|
||||
void destroy_dh_mapping()
|
||||
void destroy_ke_mapping()
|
||||
{
|
||||
enumerator_t *enumerator;
|
||||
char *key, *value;
|
||||
|
||||
if (group_map)
|
||||
if (method_map)
|
||||
{
|
||||
enumerator = group_map->create_enumerator(group_map);
|
||||
enumerator = method_map->create_enumerator(method_map);
|
||||
while (enumerator->enumerate(enumerator, &key, &value))
|
||||
{
|
||||
free(key);
|
||||
free(value);
|
||||
}
|
||||
enumerator->destroy(enumerator);
|
||||
group_map->destroy(group_map);
|
||||
method_map->destroy(method_map);
|
||||
method_map = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* Described in header.
|
||||
*/
|
||||
tkm_diffie_hellman_t *tkm_diffie_hellman_create(key_exchange_method_t group)
|
||||
tkm_key_exchange_t *tkm_key_exchange_create(key_exchange_method_t method)
|
||||
{
|
||||
private_tkm_diffie_hellman_t *this;
|
||||
private_tkm_key_exchange_t *this;
|
||||
|
||||
if (!group_map)
|
||||
if (!method_map)
|
||||
{
|
||||
return NULL;
|
||||
}
|
||||
@ -223,8 +242,8 @@ tkm_diffie_hellman_t *tkm_diffie_hellman_create(key_exchange_method_t group)
|
||||
},
|
||||
.get_id = _get_id,
|
||||
},
|
||||
.group = group,
|
||||
.context_id = tkm->idmgr->acquire_id(tkm->idmgr, TKM_CTX_DH),
|
||||
.method = method,
|
||||
.context_id = tkm->idmgr->acquire_id(tkm->idmgr, TKM_CTX_KE),
|
||||
);
|
||||
|
||||
if (!this->context_id)
|
||||
@ -233,18 +252,14 @@ tkm_diffie_hellman_t *tkm_diffie_hellman_create(key_exchange_method_t group)
|
||||
return NULL;
|
||||
}
|
||||
|
||||
uint64_t *dha_id = group_map->get(group_map, &group);
|
||||
if (!dha_id)
|
||||
uint64_t *kea_id_ptr = method_map->get(method_map, &method);
|
||||
if (!kea_id_ptr)
|
||||
{
|
||||
free(this);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (ike_dh_create(this->context_id, *dha_id, &this->pubvalue) != TKM_OK)
|
||||
{
|
||||
free(this);
|
||||
return NULL;
|
||||
}
|
||||
this->kea_id = *kea_id_ptr;
|
||||
|
||||
return &this->public;
|
||||
}
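For illustration, a hedged sketch of driving the returned object through the generic key_exchange_t interface (the chosen method and the peer handling are assumptions, not part of this patch):

tkm_key_exchange_t *ke;
chunk_t pub = chunk_empty;

ke = tkm_key_exchange_create(CURVE_25519);
if (ke && ke->ke.get_public_key(&ke->ke, &pub))
{
	/* send pub to the peer, then feed the peer's value back via
	 * ke->ke.set_public_key() before the TKM derives the keys */
	chunk_free(&pub);
}
if (ke)
{
	ke->ke.destroy(&ke->ke);
}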
|
@ -16,14 +16,14 @@
|
||||
*/
|
||||
|
||||
/**
|
||||
* @defgroup tkm-dh diffie hellman
|
||||
* @defgroup tkm-ke key exchange
|
||||
* @{ @ingroup tkm
|
||||
*/
|
||||
|
||||
#ifndef TKM_DIFFIE_HELLMAN_H_
|
||||
#define TKM_DIFFIE_HELLMAN_H_
|
||||
#ifndef TKM_KEY_EXCHANGE_H_
|
||||
#define TKM_KEY_EXCHANGE_H_
|
||||
|
||||
typedef struct tkm_diffie_hellman_t tkm_diffie_hellman_t;
|
||||
typedef struct tkm_key_exchange_t tkm_key_exchange_t;
|
||||
|
||||
#include <library.h>
|
||||
#include <tkm/types.h>
|
||||
@ -31,7 +31,7 @@ typedef struct tkm_diffie_hellman_t tkm_diffie_hellman_t;
|
||||
/**
|
||||
* key_exchange_t implementation using the trusted key manager.
|
||||
*/
|
||||
struct tkm_diffie_hellman_t {
|
||||
struct tkm_key_exchange_t {
|
||||
|
||||
/**
|
||||
* Implements key_exchange_t interface.
|
||||
@ -39,33 +39,33 @@ struct tkm_diffie_hellman_t {
|
||||
key_exchange_t ke;
|
||||
|
||||
/**
|
||||
* Get Diffie-Hellman context id.
|
||||
* Get Key Exchange context id.
|
||||
*
|
||||
* @return id of this DH context.
|
||||
* @return id of this KE context.
|
||||
*/
|
||||
dh_id_type (*get_id)(tkm_diffie_hellman_t * const this);
|
||||
ke_id_type (*get_id)(tkm_key_exchange_t * const this);
|
||||
|
||||
};
|
||||
|
||||
/**
|
||||
* Loads IANA DH group identifier to TKM id mapping from config and registers
|
||||
* the corresponding DH features.
|
||||
* Loads IANA KE method identifier to TKM id mapping from config and registers
|
||||
* the corresponding KE plugin features.
|
||||
*
|
||||
* @return number of registered mappings
|
||||
*/
|
||||
int register_dh_mapping();
|
||||
int register_ke_mapping();
|
||||
|
||||
/**
|
||||
* Destroy IANA DH group identifier to TKM id mapping.
|
||||
* Destroy IANA KE method identifier to TKM id mapping.
|
||||
*/
|
||||
void destroy_dh_mapping();
|
||||
void destroy_ke_mapping();
|
||||
|
||||
/**
|
||||
* Creates a new tkm_diffie_hellman_t object.
|
||||
* Creates a new tkm_key_exchange_t object.
|
||||
*
|
||||
* @param group Diffie Hellman group number to use
|
||||
* @return tkm_diffie_hellman_t object, NULL if not supported
|
||||
* @param method Key exchange method to use
|
||||
* @return tkm_key_exchange_t object, NULL if not supported
|
||||
*/
|
||||
tkm_diffie_hellman_t *tkm_diffie_hellman_create(key_exchange_method_t group);
|
||||
tkm_key_exchange_t *tkm_key_exchange_create(key_exchange_method_t method);
|
||||
|
||||
#endif /** TKM_DIFFIE_HELLMAN_H_ @}*/
|
||||
#endif /** TKM_KEY_EXCHANGE_H_ @}*/
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (C) 2015 Tobias Brunner
|
||||
* Copyright (C) 2015-2020 Tobias Brunner
|
||||
* Copyright (C) 2012 Reto Buerki
|
||||
* Copyright (C) 2012 Adrian-Ken Rueegsegger
|
||||
*
|
||||
@ -24,7 +24,7 @@
|
||||
#include "tkm.h"
|
||||
#include "tkm_types.h"
|
||||
#include "tkm_utils.h"
|
||||
#include "tkm_diffie_hellman.h"
|
||||
#include "tkm_key_exchange.h"
|
||||
#include "tkm_keymat.h"
|
||||
#include "tkm_aead.h"
|
||||
|
||||
@ -94,33 +94,50 @@ METHOD(keymat_t, create_nonce_gen, nonce_gen_t*,
|
||||
return lib->crypto->create_nonce_gen(lib->crypto);
|
||||
}
|
||||
|
||||
/**
* Concatenate the TKM KE IDs of the passed key exchanges
*/
static bool concat_ke_ids(array_t *kes, ke_ids_type *ids)
{
tkm_key_exchange_t *tkm_ke;
uint32_t i;

memset(ids, 0, sizeof(*ids));
ids->size = array_count(kes);

if (!ids->size || ids->size > 8)
{
return FALSE;
}

for (i = 0; i < ids->size; i++)
{
array_get(kes, i, &tkm_ke);
ids->data[i] = tkm_ke->get_id(tkm_ke);
}
return TRUE;
}
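A short sketch of how this helper is used by the key derivation code below (kes is the array_t of tkm_key_exchange_t objects passed to derive_ike_keys()):

ke_ids_type ke_ids;

if (!concat_ke_ids(kes, &ke_ids))
{
	return FALSE;
}
/* a single KE: ke_ids.data[0] goes to ike_isa_create(); with multiple key
 * exchanges the whole sequence is forwarded, e.g. to ike_esa_create() */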
|
||||
|
||||
METHOD(keymat_v2_t, derive_ike_keys, bool,
|
||||
private_tkm_keymat_t *this, proposal_t *proposal, key_exchange_t *ke,
|
||||
private_tkm_keymat_t *this, proposal_t *proposal, array_t *kes,
|
||||
chunk_t nonce_i, chunk_t nonce_r, ike_sa_id_t *id,
|
||||
pseudo_random_function_t rekey_function, chunk_t rekey_skd)
|
||||
{
|
||||
uint64_t nc_id, spi_loc, spi_rem;
|
||||
uint64_t nc_id = 0, spi_loc, spi_rem;
|
||||
chunk_t *nonce;
|
||||
tkm_diffie_hellman_t *tkm_dh;
|
||||
dh_id_type dh_id;
|
||||
ke_ids_type ke_ids;
|
||||
nonce_type nonce_rem;
|
||||
result_type res;
|
||||
block_len_type block_len;
|
||||
icv_len_type icv_len;
|
||||
iv_len_type iv_len;
|
||||
|
||||
/* Acquire nonce context id */
|
||||
nonce = this->initiator ? &nonce_i : &nonce_r;
|
||||
nc_id = tkm->chunk_map->get_id(tkm->chunk_map, nonce);
|
||||
if (!nc_id)
|
||||
if (!concat_ke_ids(kes, &ke_ids))
|
||||
{
|
||||
DBG1(DBG_IKE, "unable to acquire context id for nonce");
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
/* Get DH context id */
|
||||
tkm_dh = (tkm_diffie_hellman_t *)ke;
|
||||
dh_id = tkm_dh->get_id(tkm_dh);
|
||||
nonce = this->initiator ? &nonce_i : &nonce_r;
|
||||
|
||||
if (this->initiator)
|
||||
{
|
||||
@ -137,16 +154,24 @@ METHOD(keymat_v2_t, derive_ike_keys, bool,
|
||||
|
||||
if (rekey_function == PRF_UNDEFINED)
|
||||
{
|
||||
/* Acquire nonce context id */
|
||||
nc_id = tkm->chunk_map->get_id(tkm->chunk_map, nonce);
|
||||
if (!nc_id)
|
||||
{
|
||||
DBG1(DBG_IKE, "unable to acquire context id for nonce");
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
this->ae_ctx_id = tkm->idmgr->acquire_id(tkm->idmgr, TKM_CTX_AE);
|
||||
if (!this->ae_ctx_id)
|
||||
{
|
||||
DBG1(DBG_IKE, "unable to acquire ae context id");
|
||||
return FALSE;
|
||||
}
|
||||
DBG1(DBG_IKE, "deriving IKE keys (nc: %llu, dh: %llu, spi_loc: %llx, "
|
||||
"spi_rem: %llx)", nc_id, dh_id, spi_loc, spi_rem);
|
||||
res = ike_isa_create(this->isa_ctx_id, this->ae_ctx_id, 1, dh_id, nc_id,
|
||||
nonce_rem, this->initiator, spi_loc, spi_rem,
|
||||
DBG1(DBG_IKE, "deriving IKE keys (nc: %llu, ke: %llu, spi_loc: %llx, "
|
||||
"spi_rem: %llx)", nc_id, ke_ids.data[0], spi_loc, spi_rem);
|
||||
res = ike_isa_create(this->isa_ctx_id, this->ae_ctx_id, 1, ke_ids.data[0],
|
||||
nc_id, nonce_rem, this->initiator, spi_loc, spi_rem,
|
||||
&block_len, &icv_len, &iv_len);
|
||||
}
|
||||
else
|
||||
@ -159,77 +184,103 @@ METHOD(keymat_v2_t, derive_ike_keys, bool,
|
||||
return FALSE;
|
||||
}
|
||||
isa_info = *((isa_info_t *)(rekey_skd.ptr));
|
||||
DBG1(DBG_IKE, "deriving IKE keys (parent_isa: %llu, ae: %llu, nc: %llu,"
|
||||
" dh: %llu, spi_loc: %llx, spi_rem: %llx)", isa_info.parent_isa_id,
|
||||
isa_info.ae_id, nc_id, dh_id, spi_loc, spi_rem);
|
||||
|
||||
if (!tkm->idmgr->acquire_ref(tkm->idmgr, TKM_CTX_AE, isa_info.ae_id))
|
||||
if (this->ae_ctx_id == isa_info.ae_id)
|
||||
{
|
||||
DBG1(DBG_IKE, "deriving IKE keys (parent_isa: %llu, ae: %llu, "
|
||||
"ke: %llu, spi_loc: %llx, spi_rem: %llx)", isa_info.parent_isa_id,
|
||||
isa_info.ae_id, ke_ids.data[0], spi_loc, spi_rem);
|
||||
|
||||
res = ike_isa_update(this->isa_ctx_id, ke_ids.data[0]);
|
||||
}
|
||||
else if (!(nc_id = tkm->chunk_map->get_id(tkm->chunk_map, nonce)))
|
||||
{
|
||||
DBG1(DBG_IKE, "unable to acquire context id for nonce");
|
||||
return FALSE;
|
||||
}
|
||||
else if (!tkm->idmgr->acquire_ref(tkm->idmgr, TKM_CTX_AE, isa_info.ae_id))
|
||||
{
|
||||
DBG1(DBG_IKE, "unable to acquire reference for ae: %llu",
|
||||
isa_info.ae_id);
|
||||
return FALSE;
|
||||
}
|
||||
this->ae_ctx_id = isa_info.ae_id;
|
||||
res = ike_isa_create_child(this->isa_ctx_id, isa_info.parent_isa_id, 1,
|
||||
dh_id, nc_id, nonce_rem, this->initiator,
|
||||
spi_loc, spi_rem, &block_len, &icv_len,
|
||||
&iv_len);
|
||||
else
|
||||
{
|
||||
DBG1(DBG_IKE, "deriving IKE keys (parent_isa: %llu, ae: %llu, nc: %llu, "
|
||||
"ke: %llu, spi_loc: %llx, spi_rem: %llx)", isa_info.parent_isa_id,
|
||||
isa_info.ae_id, nc_id, ke_ids.data[0], spi_loc, spi_rem);
|
||||
|
||||
this->ae_ctx_id = isa_info.ae_id;
|
||||
res = ike_isa_create_child(this->isa_ctx_id, isa_info.parent_isa_id, 1,
|
||||
ke_ids, nc_id, nonce_rem, this->initiator,
|
||||
spi_loc, spi_rem, &block_len, &icv_len,
|
||||
&iv_len);
|
||||
}
|
||||
|
||||
chunk_free(&rekey_skd);
|
||||
}
|
||||
|
||||
if (nc_id)
|
||||
{
|
||||
tkm->chunk_map->remove(tkm->chunk_map, nonce);
|
||||
if (ike_nc_reset(nc_id) != TKM_OK)
|
||||
{
|
||||
DBG1(DBG_IKE, "failed to reset nonce context %llu", nc_id);
|
||||
}
|
||||
tkm->idmgr->release_id(tkm->idmgr, TKM_CTX_NONCE, nc_id);
|
||||
}
|
||||
|
||||
if (res != TKM_OK)
|
||||
{
|
||||
DBG1(DBG_IKE, "key derivation failed (isa: %llu)", this->isa_ctx_id);
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
this->aead = tkm_aead_create(this->isa_ctx_id, block_len, icv_len, iv_len);
|
||||
if (!this->aead)
|
||||
{
|
||||
this->aead = tkm_aead_create(this->isa_ctx_id, block_len, icv_len,
|
||||
iv_len);
|
||||
}
|
||||
|
||||
/* TODO: Add failure handler (see keymat_v2.c) */
|
||||
|
||||
tkm->chunk_map->remove(tkm->chunk_map, nonce);
|
||||
if (ike_nc_reset(nc_id) != TKM_OK)
|
||||
{
|
||||
DBG1(DBG_IKE, "failed to reset nonce context %llu", nc_id);
|
||||
}
|
||||
tkm->idmgr->release_id(tkm->idmgr, TKM_CTX_NONCE, nc_id);
|
||||
|
||||
return TRUE;
|
||||
}
|
||||
|
||||
METHOD(keymat_v2_t, derive_child_keys, bool,
|
||||
private_tkm_keymat_t *this, proposal_t *proposal, key_exchange_t *ke,
|
||||
private_tkm_keymat_t *this, proposal_t *proposal, array_t *kes,
|
||||
chunk_t nonce_i, chunk_t nonce_r, chunk_t *encr_i, chunk_t *integ_i,
|
||||
chunk_t *encr_r, chunk_t *integ_r)
|
||||
{
|
||||
esa_info_t *esa_info_i, *esa_info_r;
|
||||
dh_id_type dh_id = 0;
|
||||
ke_ids_type ke_ids = {};
|
||||
|
||||
if (ke)
|
||||
if (kes && !concat_ke_ids(kes, &ke_ids))
|
||||
{
|
||||
dh_id = ((tkm_diffie_hellman_t *)ke)->get_id((tkm_diffie_hellman_t *)ke);
|
||||
return FALSE;
|
||||
}
|
||||
|
||||
INIT(esa_info_i,
|
||||
.isa_id = this->isa_ctx_id,
|
||||
.spi_r = proposal->get_spi(proposal),
|
||||
.spi_l = proposal->get_spi(proposal),
|
||||
.nonce_i = chunk_clone(nonce_i),
|
||||
.nonce_r = chunk_clone(nonce_r),
|
||||
.is_encr_r = FALSE,
|
||||
.dh_id = dh_id,
|
||||
.ke_ids = ke_ids,
|
||||
);
|
||||
|
||||
INIT(esa_info_r,
|
||||
.isa_id = this->isa_ctx_id,
|
||||
.spi_r = proposal->get_spi(proposal),
|
||||
.spi_l = proposal->get_spi(proposal),
|
||||
.nonce_i = chunk_clone(nonce_i),
|
||||
.nonce_r = chunk_clone(nonce_r),
|
||||
.is_encr_r = TRUE,
|
||||
.dh_id = dh_id,
|
||||
.ke_ids = ke_ids,
|
||||
);
|
||||
|
||||
DBG1(DBG_CHD, "passing on esa info (isa: %llu, spi_r: %x, dh_id: %llu)",
|
||||
esa_info_i->isa_id, ntohl(esa_info_i->spi_r), esa_info_i->dh_id);
|
||||
DBG1(DBG_CHD, "passing on esa info (isa: %llu, spi_l: %x, "
|
||||
"ke_id[%llu]: %llu)", esa_info_i->isa_id, ntohl(esa_info_i->spi_l),
|
||||
esa_info_i->ke_ids.size, esa_info_i->ke_ids.data[0]);
|
||||
|
||||
/* store ESA info in encr_i/r, which is passed to add_sa */
|
||||
*encr_i = chunk_create((u_char *)esa_info_i, sizeof(esa_info_t));
|
||||
@ -246,10 +297,30 @@ METHOD(keymat_t, get_aead, aead_t*,
|
||||
return this->aead;
|
||||
}
|
||||
|
||||
METHOD(keymat_v2_t, get_int_auth, bool,
|
||||
private_tkm_keymat_t *this, bool verify, chunk_t data, chunk_t prev,
|
||||
chunk_t *auth)
|
||||
{
|
||||
blob_id_type data_id;
|
||||
bool ret = FALSE;
|
||||
|
||||
*auth = chunk_empty;
|
||||
|
||||
data_id = tkm->idmgr->acquire_id(tkm->idmgr, TKM_CTX_BLOB);
|
||||
if (data_id)
|
||||
{
|
||||
ret = chunk_to_blob(data_id, &data) &&
|
||||
ike_isa_int_auth(this->isa_ctx_id, verify, data_id) == TKM_OK;
|
||||
|
||||
tkm->idmgr->release_id(tkm->idmgr, TKM_CTX_BLOB, data_id);
|
||||
}
|
||||
return ret;
|
||||
}
|
||||
|
||||
METHOD(keymat_v2_t, get_auth_octets, bool,
|
||||
private_tkm_keymat_t *this, bool verify, chunk_t ike_sa_init,
|
||||
chunk_t nonce, chunk_t ppk, identification_t *id, char reserved[3],
|
||||
chunk_t *octets, array_t *schemes)
|
||||
chunk_t nonce, chunk_t int_auth, chunk_t ppk, identification_t *id,
|
||||
char reserved[3], chunk_t *octets, array_t *schemes)
|
||||
{
|
||||
sign_info_t *sign;
|
||||
|
||||
@ -279,6 +350,12 @@ METHOD(keymat_v2_t, get_skd, pseudo_random_function_t,
|
||||
{
|
||||
isa_info_t *isa_info;
|
||||
|
||||
if (!this->ae_ctx_id)
|
||||
{
|
||||
*skd = chunk_empty;
|
||||
return PRF_UNDEFINED;
|
||||
}
|
||||
|
||||
INIT(isa_info,
|
||||
.parent_isa_id = this->isa_ctx_id,
|
||||
.ae_id = this->ae_ctx_id,
|
||||
@ -291,8 +368,8 @@ METHOD(keymat_v2_t, get_skd, pseudo_random_function_t,
|
||||
|
||||
METHOD(keymat_v2_t, get_psk_sig, bool,
|
||||
private_tkm_keymat_t *this, bool verify, chunk_t ike_sa_init, chunk_t nonce,
|
||||
chunk_t secret, chunk_t ppk, identification_t *id, char reserved[3],
|
||||
chunk_t *sig)
|
||||
chunk_t int_auth, chunk_t secret, chunk_t ppk, identification_t *id,
|
||||
char reserved[3], chunk_t *sig)
|
||||
{
|
||||
return FALSE;
|
||||
}
|
||||
@ -388,6 +465,7 @@ tkm_keymat_t *tkm_keymat_create(bool initiator)
|
||||
.derive_ike_keys_ppk = (void*)return_false,
|
||||
.derive_child_keys = _derive_child_keys,
|
||||
.get_skd = _get_skd,
|
||||
.get_int_auth = _get_int_auth,
|
||||
.get_auth_octets = _get_auth_octets,
|
||||
.get_psk_sig = _get_psk_sig,
|
||||
.add_hash_algorithm = _add_hash_algorithm,
|
||||
|
@@ -49,9 +49,9 @@ struct esa_info_t {
	isa_id_type isa_id;

	/**
	 * Responder SPI of child SA.
	 * Local SPI of child SA.
	 */
	esp_spi_type spi_r;
	esp_spi_type spi_l;

	/**
	 * Initiator nonce.
@@ -70,9 +70,9 @@ struct esa_info_t {
	bool is_encr_r;

	/**
	 * Diffie-Hellman context id.
	 * Key Exchange context ids.
	 */
	dh_id_type dh_id;
	ke_ids_type ke_ids;

};

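Note: the keymat hunks above rely on a concat_ke_ids() helper that packs the TKM context ids of all negotiated key exchanges into the ke_ids_type sequence stored in esa_info_t. The helper itself is not part of this excerpt; the following is only a sketch of the intended behaviour, assuming ke_ids_type is a variable-length sequence with size/data members (as used above) and that its capacity is countof(ke_ids->data). It is not the actual implementation.

/*
 * Minimal sketch of a concat_ke_ids() helper (illustrative assumption,
 * not code from this change): collect the TKM context id of every
 * tkm_key_exchange_t in the array into a ke_ids_type sequence, as
 * consumed by ike_isa_create() and ike_isa_create_child() above.
 */
static bool concat_ke_ids(array_t *kes, ke_ids_type *ke_ids)
{
	tkm_key_exchange_t *ke;
	int i;

	*ke_ids = (ke_ids_type){ .size = 0 };

	for (i = 0; i < array_count(kes); i++)
	{
		if (ke_ids->size >= countof(ke_ids->data))
		{	/* more key exchanges than the sequence can carry */
			return FALSE;
		}
		array_get(kes, i, &ke);
		ke_ids->data[ke_ids->size++] = ke->get_id(ke);
	}
	return ke_ids->size > 0;
}

With a single key exchange this yields ke_ids.size == 1 and ke_ids.data[0] equal to ke->get_id(ke), which matches what the derive_child_keys tests further down expect.
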
@@ -17,6 +17,9 @@

#include <utils/debug.h>

#include <tkm/client.h>
#include <tkm/constants.h>

#include "tkm_utils.h"

/* Generic variable-length sequence */
@@ -52,3 +55,48 @@ void chunk_to_sequence(const chunk_t * const chunk, void *sequence,
	}
	memcpy(seq->data, chunk->ptr, seq->size);
}

bool blob_to_chunk(blob_id_type id, blob_length_type len, chunk_t * const chunk)
{
	blob_offset_type offset = 0;
	bool ret = TRUE;

	*chunk = chunk_alloc(len);

	while (len > 0 && ret)
	{
		blob_out_bytes_type blob_data;
		blob_length_type slice_len = min(len, sizeof(blob_data.data));

		ret = ike_blob_read(id, offset, slice_len, &blob_data) == TKM_OK;
		memcpy(chunk->ptr + offset, blob_data.data, slice_len);
		offset += slice_len;
		len -= slice_len;
	}

	ike_blob_reset(id);

	return ret;
}

bool chunk_to_blob(blob_id_type id, const chunk_t * const chunk)
{
	blob_length_type len = chunk->len;
	blob_offset_type offset = 0;
	bool ret;

	ret = ike_blob_create(id, len) == TKM_OK;

	while (len > 0 && ret)
	{
		blob_in_bytes_type blob_data;
		blob_length_type slice_len = min(len, sizeof(blob_data.data));

		memcpy(blob_data.data, chunk->ptr + offset, slice_len);
		blob_data.size = slice_len;
		ret = ike_blob_write(id, offset, blob_data) == TKM_OK;
		offset += slice_len;
		len -= slice_len;
	}
	return ret;
}

@@ -46,4 +46,21 @@ void sequence_to_chunk(const byte_t * const first, const uint32_t len,
void chunk_to_sequence(const chunk_t * const chunk, void *sequence,
					   const uint32_t typelen);

/**
 * Convert blob to chunk and reset the blob.
 *
 * @param id     id of blob
 * @param len    length of blob
 * @param chunk  pointer to chunk struct
 */
bool blob_to_chunk(blob_id_type id, blob_length_type len, chunk_t * const chunk);

/**
 * Convert chunk to newly created blob.
 *
 * @param id     id of blob
 * @param chunk  pointer to chunk struct
 */
bool chunk_to_blob(blob_id_type id, const chunk_t * const chunk);

#endif /** TKM_UTILS_H_ @}*/

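Note: the blob_to_chunk()/chunk_to_blob() helpers declared above operate on a blob context id obtained from the TKM id manager (TKM_CTX_BLOB), as done in the get_int_auth hunk earlier in this diff. The snippet below is a minimal usage sketch under that assumption; blob_roundtrip() is a hypothetical name used purely for illustration and is not part of the change.

/*
 * Illustrative round trip of a chunk through a TKM blob (assumption:
 * mirrors the usage pattern of get_int_auth above, not actual code).
 */
static bool blob_roundtrip(chunk_t data)
{
	blob_id_type blob_id;
	chunk_t copy;
	bool ok = FALSE;

	/* acquire a blob context id, as done for TKM_CTX_BLOB in get_int_auth */
	blob_id = tkm->idmgr->acquire_id(tkm->idmgr, TKM_CTX_BLOB);
	if (!blob_id)
	{
		return FALSE;
	}
	if (chunk_to_blob(blob_id, &data))
	{
		/* read the data back; blob_to_chunk() also resets the blob */
		if (blob_to_chunk(blob_id, data.len, &copy))
		{
			ok = chunk_equals(data, copy);
		}
		chunk_free(&copy);
	}
	tkm->idmgr->release_id(tkm->idmgr, TKM_CTX_BLOB, blob_id);
	return ok;
}
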
@@ -19,7 +19,7 @@

#include "tkm_id_manager.h"

static const tkm_limits_t limits = {125, 100, 55, 30, 200, 42};
static const tkm_limits_t limits = {125, 100, 55, 30, 200, 42, 21};

START_TEST(test_id_mgr_creation)
{

@@ -18,31 +18,31 @@
#include <daemon.h>
#include <tests/test_suite.h>

#include "tkm_diffie_hellman.h"
#include "tkm_key_exchange.h"

START_TEST(test_dh_creation)
START_TEST(test_ke_creation)
{
	tkm_diffie_hellman_t *dh = NULL;
	tkm_key_exchange_t *ke = NULL;

	dh = tkm_diffie_hellman_create(MODP_768_BIT);
	fail_if(dh, "MODP_768 created");
	ke = tkm_key_exchange_create(MODP_768_BIT);
	fail_if(ke, "MODP_768 created");

	dh = tkm_diffie_hellman_create(MODP_4096_BIT);
	fail_if(!dh, "MODP_4096 not created");
	fail_if(!dh->get_id(dh), "Invalid context id (0)");
	ke = tkm_key_exchange_create(MODP_4096_BIT);
	fail_if(!ke, "MODP_4096 not created");
	fail_if(!ke->get_id(ke), "Invalid context id (0)");

	dh->ke.destroy(&dh->ke);
	ke->ke.destroy(&ke->ke);
}
END_TEST

START_TEST(test_dh_get_my_pubvalue)
START_TEST(test_ke_get_my_pubvalue)
{
	tkm_diffie_hellman_t *dh = tkm_diffie_hellman_create(MODP_4096_BIT);
	fail_if(!dh, "Unable to create DH");
	tkm_key_exchange_t *ke = tkm_key_exchange_create(MODP_4096_BIT);
	fail_if(!ke, "Unable to create KE");

	chunk_t value;
	ck_assert(dh->ke.get_public_key(&dh->ke, &value));
	dh->ke.destroy(&dh->ke);
	ck_assert(ke->ke.get_public_key(&ke->ke, &value));
	ke->ke.destroy(&ke->ke);

	fail_if(value.ptr == NULL, "Pubvalue is NULL");
	fail_if(value.len != 512, "Pubvalue size mismatch");
@@ -51,19 +51,19 @@ START_TEST(test_dh_get_my_pubvalue)
}
END_TEST

Suite *make_diffie_hellman_tests()
Suite *make_key_exchange_tests()
{
	Suite *s;
	TCase *tc;

	s = suite_create("Diffie-Hellman");
	s = suite_create("key exchange");

	tc = tcase_create("creation");
	tcase_add_test(tc, test_dh_creation);
	tcase_add_test(tc, test_ke_creation);
	suite_add_tcase(s, tc);

	tc = tcase_create("get_my_pubvalue");
	tcase_add_test(tc, test_dh_get_my_pubvalue);
	tcase_add_test(tc, test_ke_get_my_pubvalue);
	suite_add_tcase(s, tc);

	return s;

@ -24,7 +24,7 @@
|
||||
|
||||
#include "tkm.h"
|
||||
#include "tkm_nonceg.h"
|
||||
#include "tkm_diffie_hellman.h"
|
||||
#include "tkm_key_exchange.h"
|
||||
#include "tkm_keymat.h"
|
||||
#include "tkm_types.h"
|
||||
|
||||
@ -47,17 +47,20 @@ START_TEST(test_derive_ike_keys)
|
||||
fail_unless(ng->nonce_gen.allocate_nonce(&ng->nonce_gen, 32, &nonce),
|
||||
"Unable to allocate nonce");
|
||||
|
||||
tkm_diffie_hellman_t *dh = tkm_diffie_hellman_create(MODP_4096_BIT);
|
||||
fail_if(!dh, "Unable to create DH");
|
||||
tkm_key_exchange_t *ke = tkm_key_exchange_create(MODP_4096_BIT);
|
||||
fail_if(!ke, "Unable to create KE");
|
||||
|
||||
/* Use the same pubvalue for both sides */
|
||||
chunk_t pubvalue;
|
||||
ck_assert(dh->ke.get_public_key(&dh->ke, &pubvalue));
|
||||
ck_assert(dh->ke.set_public_key(&dh->ke, pubvalue));
|
||||
ck_assert(ke->ke.get_public_key(&ke->ke, &pubvalue));
|
||||
ck_assert(ke->ke.set_public_key(&ke->ke, pubvalue));
|
||||
|
||||
array_t *kes = NULL;
|
||||
array_insert_create(&kes, ARRAY_TAIL, ke);
|
||||
fail_unless(keymat->keymat_v2.derive_ike_keys(&keymat->keymat_v2, proposal,
|
||||
&dh->ke, nonce, nonce, ike_sa_id, PRF_UNDEFINED, chunk_empty),
|
||||
kes, nonce, nonce, ike_sa_id, PRF_UNDEFINED, chunk_empty),
|
||||
"Key derivation failed");
|
||||
array_destroy(kes);
|
||||
chunk_free(&nonce);
|
||||
|
||||
aead_t * const aead = keymat->keymat_v2.keymat.get_aead(&keymat->keymat_v2.keymat, TRUE);
|
||||
@ -70,17 +73,132 @@ START_TEST(test_derive_ike_keys)
|
||||
|
||||
ng->nonce_gen.destroy(&ng->nonce_gen);
|
||||
proposal->destroy(proposal);
|
||||
dh->ke.destroy(&dh->ke);
|
||||
ke->ke.destroy(&ke->ke);
|
||||
ike_sa_id->destroy(ike_sa_id);
|
||||
keymat->keymat_v2.keymat.destroy(&keymat->keymat_v2.keymat);
|
||||
chunk_free(&pubvalue);
|
||||
}
|
||||
END_TEST
|
||||
|
||||
START_TEST(test_derive_ike_keys_multi_ke)
|
||||
{
|
||||
proposal_t *proposal = proposal_create_from_string(PROTO_IKE,
|
||||
"aes256-sha512-modp3072-ke1_modp4096");
|
||||
fail_if(!proposal, "Unable to create proposal");
|
||||
ike_sa_id_t *ike_sa_id = ike_sa_id_create(IKEV2_MAJOR_VERSION,
|
||||
123912312312, 32312313122, TRUE);
|
||||
fail_if(!ike_sa_id, "Unable to create IKE SA ID");
|
||||
|
||||
tkm_keymat_t *keymat = tkm_keymat_create(TRUE);
|
||||
fail_if(!keymat, "Unable to create keymat");
|
||||
fail_if(!keymat->get_isa_id(keymat), "Invalid ISA context id (0)");
|
||||
|
||||
chunk_t nonce;
|
||||
tkm_nonceg_t *ng = tkm_nonceg_create();
|
||||
fail_if(!ng, "Unable to create nonce generator");
|
||||
fail_unless(ng->nonce_gen.allocate_nonce(&ng->nonce_gen, 32, &nonce),
|
||||
"Unable to allocate nonce");
|
||||
|
||||
tkm_key_exchange_t *ke = tkm_key_exchange_create(MODP_3072_BIT);
|
||||
fail_if(!ke, "Unable to create first KE");
|
||||
|
||||
/* Use the same pubvalue for both sides */
|
||||
chunk_t pubvalue;
|
||||
ck_assert(ke->ke.get_public_key(&ke->ke, &pubvalue));
|
||||
ck_assert(ke->ke.set_public_key(&ke->ke, pubvalue));
|
||||
chunk_free(&pubvalue);
|
||||
|
||||
array_t *kes = NULL;
|
||||
array_insert_create(&kes, ARRAY_TAIL, ke);
|
||||
fail_unless(keymat->keymat_v2.derive_ike_keys(&keymat->keymat_v2, proposal,
|
||||
kes, nonce, nonce, ike_sa_id, PRF_UNDEFINED, chunk_empty),
|
||||
"Key derivation failed");
|
||||
array_destroy(kes);
|
||||
ke->ke.destroy(&ke->ke);
|
||||
|
||||
const aead_t *aead = keymat->keymat_v2.keymat.get_aead(&keymat->keymat_v2.keymat, TRUE);
|
||||
fail_if(!aead, "AEAD is NULL");
|
||||
|
||||
/* single KE during IKE_INTERMEDIATE on the same keymat with same nonces */
|
||||
pseudo_random_function_t prf;
|
||||
chunk_t skd;
|
||||
prf = keymat->keymat_v2.get_skd(&keymat->keymat_v2, &skd);
|
||||
fail_if(prf != PRF_HMAC_SHA2_512, "PRF incorrect");
|
||||
|
||||
ke = tkm_key_exchange_create(MODP_4096_BIT);
|
||||
fail_if(!ke, "Unable to create second KE");
|
||||
ck_assert(ke->ke.get_public_key(&ke->ke, &pubvalue));
|
||||
ck_assert(ke->ke.set_public_key(&ke->ke, pubvalue));
|
||||
chunk_free(&pubvalue);
|
||||
|
||||
kes = NULL;
|
||||
array_insert_create(&kes, ARRAY_TAIL, ke);
|
||||
fail_unless(keymat->keymat_v2.derive_ike_keys(&keymat->keymat_v2, proposal,
|
||||
kes, nonce, nonce, ike_sa_id, prf, skd),
|
||||
"Second key derivation failed");
|
||||
array_destroy(kes);
|
||||
ke->ke.destroy(&ke->ke);
|
||||
chunk_free(&nonce);
|
||||
|
||||
aead = keymat->keymat_v2.keymat.get_aead(&keymat->keymat_v2.keymat, TRUE);
|
||||
fail_if(!aead, "AEAD is NULL");
|
||||
ng->nonce_gen.destroy(&ng->nonce_gen);
|
||||
ike_sa_id->destroy(ike_sa_id);
|
||||
|
||||
/* rekeying uses a new keymat/SA/nonce and multiple KEs */
|
||||
ike_sa_id = ike_sa_id_create(IKEV2_MAJOR_VERSION,
|
||||
34912312312, 612313122, TRUE);
|
||||
fail_if(!ike_sa_id, "Unable to create IKE SA ID");
|
||||
|
||||
tkm_keymat_t *keymat2 = tkm_keymat_create(TRUE);
|
||||
fail_if(!keymat2, "Unable to create keymat");
|
||||
fail_if(!keymat2->get_isa_id(keymat2), "Invalid ISA context id (0)");
|
||||
|
||||
ng = tkm_nonceg_create();
|
||||
fail_if(!ng, "Unable to create nonce generator");
|
||||
fail_unless(ng->nonce_gen.allocate_nonce(&ng->nonce_gen, 32, &nonce),
|
||||
"Unable to allocate nonce");
|
||||
|
||||
tkm_key_exchange_t *ke1 = tkm_key_exchange_create(MODP_3072_BIT);
|
||||
fail_if(!ke1, "Unable to create first KE");
|
||||
ck_assert(ke1->ke.get_public_key(&ke1->ke, &pubvalue));
|
||||
ck_assert(ke1->ke.set_public_key(&ke1->ke, pubvalue));
|
||||
chunk_free(&pubvalue);
|
||||
tkm_key_exchange_t *ke2 = tkm_key_exchange_create(MODP_4096_BIT);
|
||||
fail_if(!ke2, "Unable to create second KE");
|
||||
ck_assert(ke2->ke.get_public_key(&ke2->ke, &pubvalue));
|
||||
ck_assert(ke2->ke.set_public_key(&ke2->ke, pubvalue));
|
||||
chunk_free(&pubvalue);
|
||||
|
||||
prf = keymat->keymat_v2.get_skd(&keymat->keymat_v2, &skd);
|
||||
fail_if(prf != PRF_HMAC_SHA2_512, "PRF incorrect");
|
||||
|
||||
kes = NULL;
|
||||
array_insert_create(&kes, ARRAY_TAIL, ke1);
|
||||
array_insert_create(&kes, ARRAY_TAIL, ke2);
|
||||
fail_unless(keymat2->keymat_v2.derive_ike_keys(&keymat2->keymat_v2, proposal,
|
||||
kes, nonce, nonce, ike_sa_id, prf, skd),
|
||||
"Rekey key derivation failed");
|
||||
array_destroy(kes);
|
||||
ke1->ke.destroy(&ke1->ke);
|
||||
ke2->ke.destroy(&ke2->ke);
|
||||
chunk_free(&nonce);
|
||||
|
||||
aead = keymat2->keymat_v2.keymat.get_aead(&keymat2->keymat_v2.keymat, TRUE);
|
||||
fail_if(!aead, "AEAD is NULL");
|
||||
|
||||
ng->nonce_gen.destroy(&ng->nonce_gen);
|
||||
proposal->destroy(proposal);
|
||||
ike_sa_id->destroy(ike_sa_id);
|
||||
keymat->keymat_v2.keymat.destroy(&keymat->keymat_v2.keymat);
|
||||
keymat2->keymat_v2.keymat.destroy(&keymat2->keymat_v2.keymat);
|
||||
}
|
||||
END_TEST
|
||||
|
||||
START_TEST(test_derive_child_keys)
|
||||
{
|
||||
tkm_diffie_hellman_t *dh = tkm_diffie_hellman_create(MODP_4096_BIT);
|
||||
fail_if(!dh, "Unable to create DH object");
|
||||
tkm_key_exchange_t *ke = tkm_key_exchange_create(MODP_4096_BIT);
|
||||
fail_if(!ke, "Unable to create DH object");
|
||||
proposal_t *proposal = proposal_create_from_string(PROTO_ESP,
|
||||
"aes256-sha512-modp4096");
|
||||
fail_if(!proposal, "Unable to create proposal");
|
||||
@ -90,28 +208,33 @@ START_TEST(test_derive_child_keys)
|
||||
fail_if(!keymat, "Unable to create keymat");
|
||||
|
||||
chunk_t encr_i, encr_r, integ_i, integ_r;
|
||||
chunk_t nonce = chunk_from_chars("test chunk");
|
||||
chunk_t nonce_i = chunk_from_chars("test chunk 1"),
|
||||
nonce_r = chunk_from_chars("test chunk 2");
|
||||
|
||||
array_t *kes = NULL;
|
||||
array_insert_create(&kes, ARRAY_TAIL, ke);
|
||||
fail_unless(keymat->keymat_v2.derive_child_keys(&keymat->keymat_v2, proposal,
|
||||
&dh->ke,
|
||||
nonce, nonce, &encr_i,
|
||||
kes, nonce_i, nonce_r, &encr_i,
|
||||
&integ_i, &encr_r, &integ_r),
|
||||
"Child key derivation failed");
|
||||
array_destroy(kes);
|
||||
|
||||
esa_info_t *info = (esa_info_t *)encr_i.ptr;
|
||||
fail_if(!info, "encr_i does not contain esa information");
|
||||
fail_if(info->isa_id != keymat->get_isa_id(keymat),
|
||||
"Isa context id mismatch (encr_i)");
|
||||
fail_if(info->spi_r != 42,
|
||||
fail_if(info->spi_l != 42,
|
||||
"SPI mismatch (encr_i)");
|
||||
fail_unless(chunk_equals(info->nonce_i, nonce),
|
||||
fail_unless(chunk_equals(info->nonce_i, nonce_i),
|
||||
"nonce_i mismatch (encr_i)");
|
||||
fail_unless(chunk_equals(info->nonce_r, nonce),
|
||||
fail_unless(chunk_equals(info->nonce_r, nonce_r),
|
||||
"nonce_r mismatch (encr_i)");
|
||||
fail_if(info->is_encr_r,
|
||||
"Flag is_encr_r set for encr_i");
|
||||
fail_if(info->dh_id != dh->get_id(dh),
|
||||
"DH context id mismatch (encr_i)");
|
||||
fail_if(info->ke_ids.size != 1,
|
||||
"KE context number mismatch (encr_i)");
|
||||
fail_if(info->ke_ids.data[0] != ke->get_id(ke),
|
||||
"KE context id mismatch (encr_i)");
|
||||
chunk_free(&info->nonce_i);
|
||||
chunk_free(&info->nonce_r);
|
||||
|
||||
@ -119,21 +242,101 @@ START_TEST(test_derive_child_keys)
|
||||
fail_if(!info, "encr_r does not contain esa information");
|
||||
fail_if(info->isa_id != keymat->get_isa_id(keymat),
|
||||
"Isa context id mismatch (encr_r)");
|
||||
fail_if(info->spi_r != 42,
|
||||
fail_if(info->spi_l != 42,
|
||||
"SPI mismatch (encr_r)");
|
||||
fail_unless(chunk_equals(info->nonce_i, nonce),
|
||||
fail_unless(chunk_equals(info->nonce_i, nonce_i),
|
||||
"nonce_i mismatch (encr_r)");
|
||||
fail_unless(chunk_equals(info->nonce_r, nonce),
|
||||
fail_unless(chunk_equals(info->nonce_r, nonce_r),
|
||||
"nonce_r mismatch (encr_r)");
|
||||
fail_unless(info->is_encr_r,
|
||||
"Flag is_encr_r set for encr_r");
|
||||
fail_if(info->dh_id != dh->get_id(dh),
|
||||
"DH context id mismatch (encr_i)");
|
||||
fail_if(info->ke_ids.size != 1,
|
||||
"KE context number mismatch (encr_i)");
|
||||
fail_if(info->ke_ids.data[0] != ke->get_id(ke),
|
||||
"KE context id mismatch (encr_i)");
|
||||
chunk_free(&info->nonce_i);
|
||||
chunk_free(&info->nonce_r);
|
||||
|
||||
proposal->destroy(proposal);
|
||||
dh->ke.destroy(&dh->ke);
|
||||
ke->ke.destroy(&ke->ke);
|
||||
keymat->keymat_v2.keymat.destroy(&keymat->keymat_v2.keymat);
|
||||
chunk_free(&encr_i);
|
||||
chunk_free(&encr_r);
|
||||
}
|
||||
END_TEST
|
||||
|
||||
START_TEST(test_derive_child_keys_multi_ke)
|
||||
{
|
||||
tkm_key_exchange_t *ke1 = tkm_key_exchange_create(MODP_3072_BIT);
|
||||
fail_if(!ke1, "Unable to create DH object");
|
||||
tkm_key_exchange_t *ke2 = tkm_key_exchange_create(MODP_4096_BIT);
|
||||
fail_if(!ke2, "Unable to create DH object");
|
||||
proposal_t *proposal = proposal_create_from_string(PROTO_ESP,
|
||||
"aes256-sha512-modp4096");
|
||||
fail_if(!proposal, "Unable to create proposal");
|
||||
proposal->set_spi(proposal, 42);
|
||||
|
||||
tkm_keymat_t *keymat = tkm_keymat_create(TRUE);
|
||||
fail_if(!keymat, "Unable to create keymat");
|
||||
|
||||
chunk_t encr_i, encr_r, integ_i, integ_r;
|
||||
chunk_t nonce_i = chunk_from_chars("test chunk 1"),
|
||||
nonce_r = chunk_from_chars("test chunk 2");
|
||||
|
||||
array_t *kes = NULL;
|
||||
array_insert_create(&kes, ARRAY_TAIL, ke1);
|
||||
array_insert_create(&kes, ARRAY_TAIL, ke2);
|
||||
fail_unless(keymat->keymat_v2.derive_child_keys(&keymat->keymat_v2, proposal,
|
||||
kes, nonce_i, nonce_r, &encr_i,
|
||||
&integ_i, &encr_r, &integ_r),
|
||||
"Child key derivation failed");
|
||||
array_destroy(kes);
|
||||
|
||||
esa_info_t *info = (esa_info_t *)encr_i.ptr;
|
||||
fail_if(!info, "encr_i does not contain esa information");
|
||||
fail_if(info->isa_id != keymat->get_isa_id(keymat),
|
||||
"Isa context id mismatch (encr_i)");
|
||||
fail_if(info->spi_l != 42,
|
||||
"SPI mismatch (encr_i)");
|
||||
fail_unless(chunk_equals(info->nonce_i, nonce_i),
|
||||
"nonce_i mismatch (encr_i)");
|
||||
fail_unless(chunk_equals(info->nonce_r, nonce_r),
|
||||
"nonce_r mismatch (encr_i)");
|
||||
fail_if(info->is_encr_r,
|
||||
"Flag is_encr_r set for encr_i");
|
||||
fail_if(info->ke_ids.size != 2,
|
||||
"KE context number mismatch (encr_i)");
|
||||
fail_if(info->ke_ids.data[0] != ke1->get_id(ke1),
|
||||
"KE context id mismatch (encr_i)");
|
||||
fail_if(info->ke_ids.data[1] != ke2->get_id(ke2),
|
||||
"KE context id mismatch (encr_i)");
|
||||
chunk_free(&info->nonce_i);
|
||||
chunk_free(&info->nonce_r);
|
||||
|
||||
info = (esa_info_t *)encr_r.ptr;
|
||||
fail_if(!info, "encr_r does not contain esa information");
|
||||
fail_if(info->isa_id != keymat->get_isa_id(keymat),
|
||||
"Isa context id mismatch (encr_r)");
|
||||
fail_if(info->spi_l != 42,
|
||||
"SPI mismatch (encr_r)");
|
||||
fail_unless(chunk_equals(info->nonce_i, nonce_i),
|
||||
"nonce_i mismatch (encr_r)");
|
||||
fail_unless(chunk_equals(info->nonce_r, nonce_r),
|
||||
"nonce_r mismatch (encr_r)");
|
||||
fail_unless(info->is_encr_r,
|
||||
"Flag is_encr_r set for encr_r");
|
||||
fail_if(info->ke_ids.size != 2,
|
||||
"KE context number mismatch (encr_i)");
|
||||
fail_if(info->ke_ids.data[0] != ke1->get_id(ke1),
|
||||
"KE context id mismatch (encr_i)");
|
||||
fail_if(info->ke_ids.data[1] != ke2->get_id(ke2),
|
||||
"KE context id mismatch (encr_i)");
|
||||
chunk_free(&info->nonce_i);
|
||||
chunk_free(&info->nonce_r);
|
||||
|
||||
proposal->destroy(proposal);
|
||||
ke1->ke.destroy(&ke1->ke);
|
||||
ke2->ke.destroy(&ke2->ke);
|
||||
keymat->keymat_v2.keymat.destroy(&keymat->keymat_v2.keymat);
|
||||
chunk_free(&encr_i);
|
||||
chunk_free(&encr_r);
|
||||
@ -149,10 +352,12 @@ Suite *make_keymat_tests()
|
||||
|
||||
tc = tcase_create("derive IKE keys");
|
||||
tcase_add_test(tc, test_derive_ike_keys);
|
||||
tcase_add_test(tc, test_derive_ike_keys_multi_ke);
|
||||
suite_add_tcase(s, tc);
|
||||
|
||||
tc = tcase_create("derive CHILD keys");
|
||||
tcase_add_test(tc, test_derive_child_keys);
|
||||
tcase_add_test(tc, test_derive_child_keys_multi_ke);
|
||||
suite_add_tcase(s, tc);
|
||||
|
||||
return s;
|
||||
|
@ -23,7 +23,7 @@
|
||||
|
||||
#include "tkm.h"
|
||||
#include "tkm_nonceg.h"
|
||||
#include "tkm_diffie_hellman.h"
|
||||
#include "tkm_key_exchange.h"
|
||||
#include "tkm_kernel_ipsec.h"
|
||||
|
||||
/* declare test suite constructors */
|
||||
@ -75,11 +75,11 @@ static bool test_runner_init(bool init)
|
||||
lib->plugins->add_static_features(lib->plugins, "tkm-tests", features,
|
||||
countof(features), TRUE, NULL, NULL);
|
||||
|
||||
lib->settings->set_int(lib->settings, "%s.dh_mapping.%d", 1,
|
||||
lib->settings->set_int(lib->settings, "%s.ke_mapping.%d", 1,
|
||||
lib->ns, MODP_3072_BIT);
|
||||
lib->settings->set_int(lib->settings, "%s.dh_mapping.%d", 2,
|
||||
lib->settings->set_int(lib->settings, "%s.ke_mapping.%d", 2,
|
||||
lib->ns, MODP_4096_BIT);
|
||||
register_dh_mapping();
|
||||
register_ke_mapping();
|
||||
|
||||
plugin_loader_add_plugindirs(BUILDDIR "/src/libstrongswan/plugins",
|
||||
PLUGINS);
|
||||
@ -100,7 +100,7 @@ static bool test_runner_init(bool init)
|
||||
result = FALSE;
|
||||
}
|
||||
|
||||
destroy_dh_mapping();
|
||||
destroy_ke_mapping();
|
||||
libcharon_deinit();
|
||||
return result;
|
||||
}
|
||||
|
@ -19,6 +19,6 @@ TEST_SUITE(make_id_manager_tests)
|
||||
TEST_SUITE(make_chunk_map_tests)
|
||||
TEST_SUITE(make_utility_tests)
|
||||
TEST_SUITE_DEPEND(make_nonceg_tests, CUSTOM, "tkm")
|
||||
TEST_SUITE_DEPEND(make_diffie_hellman_tests, CUSTOM, "tkm")
|
||||
TEST_SUITE_DEPEND(make_key_exchange_tests, CUSTOM, "tkm")
|
||||
TEST_SUITE_DEPEND(make_keymat_tests, CUSTOM, "tkm")
|
||||
TEST_SUITE(make_kernel_sad_tests)
|
||||
|
@@ -48,11 +48,6 @@ if USE_RADIUS
libs += $(DESTDIR)$(ipseclibdir)/libradius.so
endif

if USE_LIBNTTFFT
deps += $(top_builddir)/src/libstrongswan/math/libnttfft/libnttfft.la
libs += $(DESTDIR)$(ipseclibdir)/libnttfft.so
endif

if USE_LIBPTTLS
deps += $(top_builddir)/src/libpttls/libpttls.la
libs += $(DESTDIR)$(ipseclibdir)/libpttls.so

@@ -67,6 +67,8 @@ static void build_checksum(char *path, char *name, char *sname)
		 name, fsize, fsum, ssize, ssum);
}

#if defined(S_PLUGINS) || defined(P_PLUGINS) || \
	defined(T_PLUGINS) || defined(C_PLUGINS)
/**
 * Build checksums for a set of plugins
 */
@@ -88,6 +90,7 @@ static void build_plugin_checksums(char *plugins)
	}
	enumerator->destroy(enumerator);
}
#endif

/**
 * Build checksums for a binary/library found at path

@ -265,7 +265,7 @@ static peer_cfg_t *load_peer_config(private_config_t *this,
|
||||
.cert_policy = CERT_ALWAYS_SEND,
|
||||
.unique = UNIQUE_NO,
|
||||
.keyingtries = 1,
|
||||
.no_mobike = TRUE,
|
||||
.options = OPT_NO_MOBIKE,
|
||||
};
|
||||
|
||||
ike_cfg = load_ike_config(this, settings, config);
|
||||
|
@ -382,19 +382,28 @@ static void load_log_levels(file_logger_t *logger, char *section)
|
||||
*/
|
||||
static void load_logger_options(file_logger_t *logger, char *section)
|
||||
{
|
||||
char *time_format;
|
||||
bool add_ms, ike_name, log_level;
|
||||
file_logger_options_t options;
|
||||
|
||||
time_format = conftest->test->get_str(conftest->test,
|
||||
options.time_format = conftest->test->get_str(conftest->test,
|
||||
"log.%s.time_format", NULL, section);
|
||||
add_ms = conftest->test->get_bool(conftest->test,
|
||||
"log.%s.time_add_ms", FALSE, section);
|
||||
ike_name = conftest->test->get_bool(conftest->test,
|
||||
options.time_precision = file_logger_time_precision_parse(
|
||||
conftest->test->get_str(conftest->test,
|
||||
"log.%s.time_precision", NULL, section));
|
||||
/* handle legacy option */
|
||||
if (!options.time_precision &&
|
||||
conftest->test->get_bool(conftest->test,
|
||||
"log.%s.time_add_ms", FALSE, section))
|
||||
{
|
||||
options.time_precision = FILE_LOGGER_TIME_PRECISION_MS;
|
||||
}
|
||||
options.ike_name = conftest->test->get_bool(conftest->test,
|
||||
"log.%s.ike_name", FALSE, section);
|
||||
log_level = conftest->test->get_bool(conftest->test,
|
||||
options.log_level = conftest->test->get_bool(conftest->test,
|
||||
"log.%s.log_level", FALSE, section);
|
||||
options.json = conftest->test->get_bool(conftest->test,
|
||||
"log.%s.json", FALSE, section);
|
||||
|
||||
logger->set_options(logger, time_format, add_ms, ike_name, log_level);
|
||||
logger->set_options(logger, &options);
|
||||
}
|
||||
|
||||
/**
|
||||
@ -438,6 +447,7 @@ int main(int argc, char *argv[])
|
||||
int sig;
|
||||
char *suite_file = "suite.conf", *test_file = NULL, *preload, *plugins;
|
||||
file_logger_t *logger;
|
||||
file_logger_options_t options = {};
|
||||
|
||||
if (!library_init(NULL, "conftest"))
|
||||
{
|
||||
@ -460,7 +470,7 @@ int main(int argc, char *argv[])
|
||||
lib->credmgr->add_set(lib->credmgr, &conftest->creds->set);
|
||||
|
||||
logger = file_logger_create("stdout");
|
||||
logger->set_options(logger, NULL, FALSE, FALSE, FALSE);
|
||||
logger->set_options(logger, &options);
|
||||
logger->open(logger, FALSE, FALSE);
|
||||
logger->set_level(logger, DBG_ANY, LEVEL_CTRL);
|
||||
charon->bus->add_logger(charon->bus, &logger->logger);
|
||||
|
@@ -239,8 +239,8 @@ static bool build_auth(private_pretend_auth_t *this,
	}
	keymat = (keymat_v2_t*)ike_sa->get_keymat(ike_sa);
	if (!keymat->get_auth_octets(keymat, TRUE, this->ike_init, this->nonce,
			chunk_empty, this->id, this->reserved,
			&octets, NULL))
			chunk_empty, chunk_empty, this->id,
			this->reserved, &octets, NULL))
	{
		private->destroy(private);
		return FALSE;

@@ -138,7 +138,8 @@ static bool rebuild_auth(private_rebuild_auth_t *this, ike_sa_t *ike_sa,
	}
	keymat = (keymat_v2_t*)ike_sa->get_keymat(ike_sa);
	if (!keymat->get_auth_octets(keymat, FALSE, this->ike_init, this->nonce,
			chunk_empty, id, reserved, &octets, NULL))
			chunk_empty, chunk_empty, id, reserved,
			&octets, NULL))
	{
		private->destroy(private);
		id->destroy(id);

@ -7,10 +7,10 @@ android {
|
||||
applicationId "org.strongswan.android"
|
||||
compileSdk 34
|
||||
minSdkVersion 21
|
||||
targetSdkVersion 33
|
||||
targetSdkVersion 34
|
||||
|
||||
versionCode 82
|
||||
versionName "2.5.1"
|
||||
versionCode 91
|
||||
versionName "2.5.6"
|
||||
|
||||
externalNativeBuild {
|
||||
ndkBuild {
|
||||
@ -19,7 +19,7 @@ android {
|
||||
}
|
||||
}
|
||||
|
||||
ndkVersion "26.1.10909125"
|
||||
ndkVersion "27.2.12479018"
|
||||
|
||||
externalNativeBuild {
|
||||
ndkBuild {
|
||||
@ -45,10 +45,10 @@ android {
|
||||
}
|
||||
|
||||
dependencies {
|
||||
implementation 'androidx.appcompat:appcompat:1.6.1'
|
||||
implementation 'androidx.lifecycle:lifecycle-process:2.7.0'
|
||||
implementation 'androidx.appcompat:appcompat:1.7.1'
|
||||
implementation 'androidx.lifecycle:lifecycle-process:2.9.1'
|
||||
implementation 'androidx.preference:preference:1.2.1'
|
||||
implementation 'com.google.android.material:material:1.10.0'
|
||||
implementation 'com.google.android.material:material:1.12.0'
|
||||
testImplementation 'junit:junit:4.13.2'
|
||||
testImplementation 'org.assertj:assertj-core:3.24.2'
|
||||
testImplementation 'org.mockito:mockito-core:5.8.0'
|
||||
|
@ -22,9 +22,11 @@
|
||||
<uses-permission android:name="android.permission.INTERNET" />
|
||||
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
|
||||
<uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
|
||||
<uses-permission android:name="android.permission.FOREGROUND_SERVICE_SPECIAL_USE" />
|
||||
<uses-permission android:name="android.permission.POST_NOTIFICATIONS"/>
|
||||
<uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
|
||||
<uses-permission android:name="android.permission.REQUEST_IGNORE_BATTERY_OPTIMIZATIONS" />
|
||||
<uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW" />
|
||||
<!-- necessary to allow users to select ex-/included apps and EAP-TNC -->
|
||||
<uses-permission android:name="android.permission.QUERY_ALL_PACKAGES"
|
||||
tools:ignore="QueryAllPackagesPermission" />
|
||||
@ -35,6 +37,7 @@
|
||||
android:label="@string/app_name"
|
||||
android:theme="@style/ApplicationTheme"
|
||||
android:networkSecurityConfig="@xml/network_security_config"
|
||||
android:enableOnBackInvokedCallback="true"
|
||||
android:allowBackup="false" >
|
||||
<activity
|
||||
android:name=".ui.MainActivity"
|
||||
@ -176,10 +179,14 @@
|
||||
<service
|
||||
android:name=".logic.CharonVpnService"
|
||||
android:exported="false"
|
||||
android:foregroundServiceType="specialUse"
|
||||
android:permission="android.permission.BIND_VPN_SERVICE">
|
||||
<intent-filter>
|
||||
<action android:name="android.net.VpnService" />
|
||||
</intent-filter>
|
||||
<property
|
||||
android:name="android.app.PROPERTY_SPECIAL_USE_FGS_SUBTYPE"
|
||||
android:value="VpnService instance"/>
|
||||
</service>
|
||||
<service
|
||||
android:name=".ui.VpnTileService"
|
||||
|
@ -1,6 +1,6 @@
|
||||
/*
|
||||
* Copyright (C) 2023 Relution GmbH
|
||||
* Copyright (C) 2012-2024 Tobias Brunner
|
||||
* Copyright (C) 2012-2025 Tobias Brunner
|
||||
* Copyright (C) 2012 Giuliano Grassi
|
||||
* Copyright (C) 2012 Ralf Sager
|
||||
*
|
||||
@ -67,6 +67,9 @@ public class DatabaseHelper extends SQLiteOpenHelper
|
||||
new DbColumn(VpnProfileDataSource.KEY_IKE_PROPOSAL, "TEXT", 15),
|
||||
new DbColumn(VpnProfileDataSource.KEY_ESP_PROPOSAL, "TEXT", 15),
|
||||
new DbColumn(VpnProfileDataSource.KEY_DNS_SERVERS, "TEXT", 17),
|
||||
new DbColumn(VpnProfileDataSource.KEY_PROXY_HOST, "TEXT", 19),
|
||||
new DbColumn(VpnProfileDataSource.KEY_PROXY_PORT, "INTEGER", 19),
|
||||
new DbColumn(VpnProfileDataSource.KEY_PROXY_EXCLUSIONS, "TEXT", 19),
|
||||
});
|
||||
|
||||
public static final DbTable TABLE_TRUSTED_CERTIFICATE = new DbTable(TABLE_NAME_TRUSTED_CERTIFICATE, 18, new DbColumn[]{
|
||||
@ -84,7 +87,7 @@ public class DatabaseHelper extends SQLiteOpenHelper
|
||||
new DbColumn(ManagedUserCertificate.KEY_PASSWORD, "TEXT", 18),
|
||||
});
|
||||
|
||||
private static final int DATABASE_VERSION = 18;
|
||||
private static final int DATABASE_VERSION = 19;
|
||||
|
||||
private static final Set<DbTable> TABLES;
|
||||
|
||||
|
@ -121,6 +121,7 @@ public class ManagedConfiguration
|
||||
return Arrays.asList(bundles);
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
@NonNull
|
||||
private static List<Bundle> getBundleArrayListCompat(final Bundle bundle, final String key)
|
||||
{
|
||||
|
@ -68,9 +68,9 @@ public class ManagedVpnProfile extends VpnProfile
|
||||
|
||||
setMTU(getInt(bundle, VpnProfileDataSource.KEY_MTU, Constants.MTU_MIN, Constants.MTU_MAX));
|
||||
setNATKeepAlive(getInt(bundle, VpnProfileDataSource.KEY_NAT_KEEPALIVE, Constants.NAT_KEEPALIVE_MIN, Constants.NAT_KEEPALIVE_MAX));
|
||||
setIkeProposal(bundle.getString(VpnProfileDataSource.KEY_IKE_PROPOSAL));
|
||||
setEspProposal(bundle.getString(VpnProfileDataSource.KEY_ESP_PROPOSAL));
|
||||
setDnsServers(bundle.getString(VpnProfileDataSource.KEY_DNS_SERVERS));
|
||||
setIkeProposal(getString(bundle, VpnProfileDataSource.KEY_IKE_PROPOSAL));
|
||||
setEspProposal(getString(bundle, VpnProfileDataSource.KEY_ESP_PROPOSAL));
|
||||
setDnsServers(getString(bundle, VpnProfileDataSource.KEY_DNS_SERVERS));
|
||||
flags = addPositiveFlag(flags, bundle, KEY_TRANSPORT_IPV6_FLAG, VpnProfile.FLAGS_IPv6_TRANSPORT);
|
||||
|
||||
final Bundle splitTunneling = bundle.getBundle(VpnProfileDataSource.KEY_SPLIT_TUNNELING);
|
||||
@ -79,8 +79,16 @@ public class ManagedVpnProfile extends VpnProfile
|
||||
splitFlags = addPositiveFlag(splitFlags, splitTunneling, KEY_SPLIT_TUNNELLING_BLOCK_IPV4_FLAG, VpnProfile.SPLIT_TUNNELING_BLOCK_IPV4);
|
||||
splitFlags = addPositiveFlag(splitFlags, splitTunneling, KEY_SPLIT_TUNNELLING_BLOCK_IPV6_FLAG, VpnProfile.SPLIT_TUNNELING_BLOCK_IPV6);
|
||||
|
||||
setExcludedSubnets(splitTunneling.getString(VpnProfileDataSource.KEY_EXCLUDED_SUBNETS));
|
||||
setIncludedSubnets(splitTunneling.getString(VpnProfileDataSource.KEY_INCLUDED_SUBNETS));
|
||||
setExcludedSubnets(getString(splitTunneling, VpnProfileDataSource.KEY_EXCLUDED_SUBNETS));
|
||||
setIncludedSubnets(getString(splitTunneling, VpnProfileDataSource.KEY_INCLUDED_SUBNETS));
|
||||
}
|
||||
|
||||
final Bundle proxyServer = bundle.getBundle(VpnProfileDataSource.KEY_PROXY_SERVER);
|
||||
if (proxyServer != null)
|
||||
{
|
||||
setProxyHost(getString(proxyServer, VpnProfileDataSource.KEY_PROXY_HOST));
|
||||
setProxyPort(getInt(proxyServer, VpnProfileDataSource.KEY_PROXY_PORT, 1, 65_535));
|
||||
setProxyExclusions(getString(proxyServer, VpnProfileDataSource.KEY_PROXY_EXCLUSIONS));
|
||||
}
|
||||
|
||||
setSplitTunneling(splitFlags);
|
||||
@ -110,7 +118,7 @@ public class ManagedVpnProfile extends VpnProfile
|
||||
|
||||
setGateway(remote.getString(VpnProfileDataSource.KEY_GATEWAY));
|
||||
setPort(getInt(remote, VpnProfileDataSource.KEY_PORT, 1, 65_535));
|
||||
setRemoteId(remote.getString(VpnProfileDataSource.KEY_REMOTE_ID));
|
||||
setRemoteId(getString(remote, VpnProfileDataSource.KEY_REMOTE_ID));
|
||||
|
||||
final String certificateData = remote.getString(VpnProfileDataSource.KEY_CERTIFICATE);
|
||||
if (!TextUtils.isEmpty(certificateData))
|
||||
@ -133,8 +141,9 @@ public class ManagedVpnProfile extends VpnProfile
|
||||
return flags;
|
||||
}
|
||||
|
||||
setLocalId(local.getString(VpnProfileDataSource.KEY_LOCAL_ID));
|
||||
setUsername(local.getString(VpnProfileDataSource.KEY_USERNAME));
|
||||
setLocalId(getString(local, VpnProfileDataSource.KEY_LOCAL_ID));
|
||||
setUsername(getString(local, VpnProfileDataSource.KEY_USERNAME));
|
||||
setPassword(getString(local, VpnProfileDataSource.KEY_PASSWORD));
|
||||
|
||||
final String userCertificateData = local.getString(VpnProfileDataSource.KEY_USER_CERTIFICATE);
|
||||
final String userCertificatePassword = local.getString(VpnProfileDataSource.KEY_USER_CERTIFICATE_PASSWORD, "");
|
||||
@ -154,6 +163,12 @@ public class ManagedVpnProfile extends VpnProfile
|
||||
return value < min || value > max ? null : value;
|
||||
}
|
||||
|
||||
private static String getString(final Bundle bundle, final String key)
|
||||
{
|
||||
final String value = bundle.getString(key);
|
||||
return TextUtils.isEmpty(value) ? null : value;
|
||||
}
|
||||
|
||||
private static int addPositiveFlag(int flags, Bundle bundle, String key, int flag)
|
||||
{
|
||||
if (bundle.getBoolean(key))
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (C) 2012-2019 Tobias Brunner
|
||||
* Copyright (C) 2012-2025 Tobias Brunner
|
||||
* Copyright (C) 2012 Giuliano Grassi
|
||||
* Copyright (C) 2012 Ralf Sager
|
||||
*
|
||||
@ -42,8 +42,8 @@ public class VpnProfile implements Cloneable
|
||||
|
||||
private String mName, mGateway, mUsername, mPassword, mCertificate, mUserCertificate;
|
||||
private String mRemoteId, mLocalId, mExcludedSubnets, mIncludedSubnets, mSelectedApps;
|
||||
private String mIkeProposal, mEspProposal, mDnsServers;
|
||||
private Integer mMTU, mPort, mSplitTunneling, mNATKeepAlive, mFlags;
|
||||
private String mIkeProposal, mEspProposal, mDnsServers, mProxyHost, mProxyExclusions;
|
||||
private Integer mMTU, mPort, mProxyPort, mSplitTunneling, mNATKeepAlive, mFlags;
|
||||
private SelectedAppsHandling mSelectedAppsHandling = SelectedAppsHandling.SELECTED_APPS_DISABLE;
|
||||
private VpnType mVpnType;
|
||||
private UUID mUUID;
|
||||
@ -313,6 +313,36 @@ public class VpnProfile implements Cloneable
|
||||
return mSelectedAppsHandling;
|
||||
}
|
||||
|
||||
public String getProxyHost()
|
||||
{
|
||||
return mProxyHost;
|
||||
}
|
||||
|
||||
public void setProxyHost(String proxy)
|
||||
{
|
||||
this.mProxyHost = proxy;
|
||||
}
|
||||
|
||||
public Integer getProxyPort()
|
||||
{
|
||||
return mProxyPort;
|
||||
}
|
||||
|
||||
public void setProxyPort(Integer port)
|
||||
{
|
||||
this.mProxyPort = port;
|
||||
}
|
||||
|
||||
public String getProxyExclusions()
|
||||
{
|
||||
return mProxyExclusions;
|
||||
}
|
||||
|
||||
public void setProxyExclusions(String exclusions)
|
||||
{
|
||||
this.mProxyExclusions = exclusions;
|
||||
}
|
||||
|
||||
public Integer getSplitTunneling()
|
||||
{
|
||||
return mSplitTunneling;
|
||||
|
@@ -50,6 +50,10 @@ public interface VpnProfileDataSource
	String KEY_IKE_PROPOSAL = "ike_proposal";
	String KEY_ESP_PROPOSAL = "esp_proposal";
	String KEY_DNS_SERVERS = "dns_servers";
	String KEY_PROXY_SERVER = "proxy_server";
	String KEY_PROXY_HOST = "proxy_host";
	String KEY_PROXY_PORT = "proxy_port";
	String KEY_PROXY_EXCLUSIONS = "proxy_exclusions";
	String KEY_READ_ONLY = "read_only";

	/**

@ -1,4 +1,5 @@
|
||||
/*
|
||||
* Copyright (C) 2025 Tobias Brunner
|
||||
* Copyright (C) 2023 Relution GmbH
|
||||
*
|
||||
* Copyright (C) secunet Security Networks AG
|
||||
@ -75,17 +76,14 @@ public class VpnProfileManagedDataSource implements VpnProfileDataSource
|
||||
@Override
|
||||
public boolean updateVpnProfile(VpnProfile profile)
|
||||
{
|
||||
final VpnProfile existingProfile = getVpnProfile(profile.getUUID());
|
||||
if (existingProfile == null)
|
||||
final VpnProfile managedProfile = mManagedConfigurationService.getManagedProfiles().get(profile.getUUID().toString());
|
||||
if (managedProfile == null)
|
||||
{
|
||||
return false;
|
||||
}
|
||||
|
||||
final String password = profile.getPassword();
|
||||
existingProfile.setPassword(password);
|
||||
|
||||
final SharedPreferences.Editor editor = mSharedPreferences.edit();
|
||||
editor.putString(profile.getUUID().toString(), password);
|
||||
editor.putString(profile.getUUID().toString(), profile.getPassword());
|
||||
return editor.commit();
|
||||
}
|
||||
|
||||
@ -95,10 +93,28 @@ public class VpnProfileManagedDataSource implements VpnProfileDataSource
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clone and prepare the given managed profile before handing it out.
|
||||
* @param managedProfile profile to prepare
|
||||
*/
|
||||
private VpnProfile prepareVpnProfile(VpnProfile managedProfile)
|
||||
{
|
||||
final String password = mSharedPreferences.getString(managedProfile.getUUID().toString(), managedProfile.getPassword());
|
||||
final VpnProfile vpnProfile = managedProfile.clone();
|
||||
vpnProfile.setPassword(password);
|
||||
vpnProfile.setDataSource(this);
|
||||
return vpnProfile;
|
||||
}
|
||||
|
||||
@Override
|
||||
public VpnProfile getVpnProfile(UUID uuid)
|
||||
{
|
||||
return mManagedConfigurationService.getManagedProfiles().get(uuid.toString());
|
||||
final VpnProfile managedProfile = mManagedConfigurationService.getManagedProfiles().get(uuid.toString());
|
||||
if (managedProfile != null)
|
||||
{
|
||||
return prepareVpnProfile(managedProfile);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -106,12 +122,9 @@ public class VpnProfileManagedDataSource implements VpnProfileDataSource
|
||||
{
|
||||
final Map<String, ManagedVpnProfile> managedVpnProfiles = mManagedConfigurationService.getManagedProfiles();
|
||||
final List<VpnProfile> vpnProfiles = new ArrayList<>();
|
||||
for (final VpnProfile vpnProfile : managedVpnProfiles.values())
|
||||
for (final VpnProfile managedProfile : managedVpnProfiles.values())
|
||||
{
|
||||
final String password = mSharedPreferences.getString(vpnProfile.getUUID().toString(), vpnProfile.getPassword());
|
||||
vpnProfile.setPassword(password);
|
||||
vpnProfile.setDataSource(this);
|
||||
vpnProfiles.add(vpnProfile);
|
||||
vpnProfiles.add(prepareVpnProfile(managedProfile));
|
||||
}
|
||||
return vpnProfiles;
|
||||
}
|
||||
|
@ -151,6 +151,9 @@ public class VpnProfileSqlDataSource implements VpnProfileDataSource
|
||||
profile.setIkeProposal(cursor.getString(cursor.getColumnIndexOrThrow(KEY_IKE_PROPOSAL)));
|
||||
profile.setEspProposal(cursor.getString(cursor.getColumnIndexOrThrow(KEY_ESP_PROPOSAL)));
|
||||
profile.setDnsServers(cursor.getString(cursor.getColumnIndexOrThrow(KEY_DNS_SERVERS)));
|
||||
profile.setProxyHost(cursor.getString(cursor.getColumnIndexOrThrow(KEY_PROXY_HOST)));
|
||||
profile.setProxyPort(getInt(cursor, cursor.getColumnIndexOrThrow(KEY_PROXY_PORT)));
|
||||
profile.setProxyExclusions(cursor.getString(cursor.getColumnIndexOrThrow(KEY_PROXY_EXCLUSIONS)));
|
||||
return profile;
|
||||
}
|
||||
|
||||
@ -179,6 +182,9 @@ public class VpnProfileSqlDataSource implements VpnProfileDataSource
|
||||
values.put(KEY_IKE_PROPOSAL, profile.getIkeProposal());
|
||||
values.put(KEY_ESP_PROPOSAL, profile.getEspProposal());
|
||||
values.put(KEY_DNS_SERVERS, profile.getDnsServers());
|
||||
values.put(KEY_PROXY_HOST, profile.getProxyHost());
|
||||
values.put(KEY_PROXY_PORT, profile.getProxyPort());
|
||||
values.put(KEY_PROXY_EXCLUSIONS, profile.getProxyExclusions());
|
||||
return values;
|
||||
}
|
||||
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (C) 2012-2018 Tobias Brunner
|
||||
* Copyright (C) 2012-2025 Tobias Brunner
|
||||
* Copyright (C) 2012 Giuliano Grassi
|
||||
* Copyright (C) 2012 Ralf Sager
|
||||
*
|
||||
@ -30,6 +30,7 @@ import android.content.Intent;
|
||||
import android.content.ServiceConnection;
|
||||
import android.content.SharedPreferences;
|
||||
import android.content.pm.PackageManager;
|
||||
import android.net.ProxyInfo;
|
||||
import android.net.VpnService;
|
||||
import android.os.Build;
|
||||
import android.os.Bundle;
|
||||
@ -72,6 +73,8 @@ import java.security.PrivateKey;
|
||||
import java.security.cert.CertificateEncodingException;
|
||||
import java.security.cert.X509Certificate;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.SortedSet;
|
||||
@ -399,11 +402,24 @@ public class CharonVpnService extends VpnService implements Runnable, VpnStateSe
|
||||
public void run()
|
||||
{
|
||||
mShowNotification = false;
|
||||
stopForeground(true);
|
||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N)
|
||||
{
|
||||
stopForegroundCompat();
|
||||
}
|
||||
else
|
||||
{
|
||||
stopForeground(STOP_FOREGROUND_REMOVE);
|
||||
}
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
@SuppressWarnings("deprecation")
|
||||
private void stopForegroundCompat()
|
||||
{
|
||||
stopForeground(true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a notification channel for Android 8+
|
||||
*/
|
||||
@ -1103,6 +1119,7 @@ public class CharonVpnService extends VpnService implements Runnable, VpnStateSe
|
||||
private final List<InetAddress> mDnsServers = new ArrayList<>();
|
||||
private int mMtu;
|
||||
private boolean mIPv4Seen, mIPv6Seen, mDnsServersConfigured;
|
||||
private ProxyInfo mProxyServer;
|
||||
|
||||
public BuilderCache(VpnProfile profile)
|
||||
{
|
||||
@ -1156,6 +1173,17 @@ public class CharonVpnService extends VpnService implements Runnable, VpnStateSe
|
||||
}
|
||||
}
|
||||
|
||||
if (profile.getProxyHost() != null)
|
||||
{
|
||||
int port = profile.getProxyPort() != null ? profile.getProxyPort() : Constants.PROXY_PORT_DEFAULT;
|
||||
List<String> exclusions = new ArrayList<>();
|
||||
if (profile.getProxyExclusions() != null)
|
||||
{
|
||||
Collections.addAll(exclusions, profile.getProxyExclusions().split("\\s+"));
|
||||
}
|
||||
mProxyServer = ProxyInfo.buildDirectProxy(profile.getProxyHost(), port, exclusions);
|
||||
}
|
||||
|
||||
/* set a default MTU, will be set by the daemon for regular interfaces */
|
||||
Integer mtu = profile.getMTU();
|
||||
mMtu = mtu == null ? Constants.MTU_MAX : mtu;
|
||||
@ -1236,7 +1264,7 @@ public class CharonVpnService extends VpnService implements Runnable, VpnStateSe
|
||||
}
|
||||
}
|
||||
|
||||
public void applyData(VpnService.Builder builder)
|
||||
public void applyData(Builder builder)
|
||||
{
|
||||
for (IPRange address : mAddresses)
|
||||
{
|
||||
@ -1362,6 +1390,10 @@ public class CharonVpnService extends VpnService implements Runnable, VpnStateSe
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q && mProxyServer != null)
|
||||
{
|
||||
builder.setHttpProxy(mProxyServer);
|
||||
}
|
||||
builder.setMtu(mMtu);
|
||||
}
|
||||
|
||||
|
@ -16,6 +16,7 @@
|
||||
|
||||
package org.strongswan.android.logic;
|
||||
|
||||
import android.annotation.SuppressLint;
|
||||
import android.app.AlarmManager;
|
||||
import android.app.PendingIntent;
|
||||
import android.content.BroadcastReceiver;
|
||||
@ -37,6 +38,7 @@ public class Scheduler extends BroadcastReceiver
|
||||
private final AlarmManager mManager;
|
||||
private final PriorityQueue<ScheduledJob> mJobs;
|
||||
|
||||
@SuppressLint("UnspecifiedRegisterReceiverFlag")
|
||||
public Scheduler(Context context)
|
||||
{
|
||||
mContext = context;
|
||||
@ -45,7 +47,14 @@ public class Scheduler extends BroadcastReceiver
|
||||
|
||||
IntentFilter filter = new IntentFilter();
|
||||
filter.addAction(EXECUTE_JOB);
|
||||
mContext.registerReceiver(this, filter);
|
||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU)
|
||||
{
|
||||
mContext.registerReceiver(this, filter, Context.RECEIVER_NOT_EXPORTED);
|
||||
}
|
||||
else
|
||||
{
|
||||
mContext.registerReceiver(this, filter);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -22,6 +22,7 @@ import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.Proxy;
import java.net.SocketTimeoutException;
import java.net.URL;
import java.util.ArrayList;
@@ -55,7 +56,7 @@ public class SimpleFetcher
			}
			future = mExecutor.submit(() -> {
				URL url = new URL(uri);
				HttpURLConnection conn = (HttpURLConnection) url.openConnection();
				HttpURLConnection conn = (HttpURLConnection) url.openConnection(Proxy.NO_PROXY);
				conn.setConnectTimeout(10000);
				conn.setReadTimeout(10000);
				conn.setRequestProperty("Connection", "close");

@ -1,6 +1,6 @@
|
||||
/*
|
||||
* Copyright (C) 2023 Relution GmbH
|
||||
* Copyright (C) 2014-2024 Tobias Brunner
|
||||
* Copyright (C) 2014-2025 Tobias Brunner
|
||||
*
|
||||
* Copyright (C) secunet Security Networks AG
|
||||
*
|
||||
@ -38,14 +38,10 @@ import java.util.concurrent.Executor;
|
||||
import java.util.concurrent.ExecutorService;
|
||||
import java.util.concurrent.Executors;
|
||||
|
||||
import androidx.annotation.NonNull;
|
||||
import androidx.core.os.HandlerCompat;
|
||||
import androidx.lifecycle.DefaultLifecycleObserver;
|
||||
import androidx.lifecycle.LifecycleOwner;
|
||||
import androidx.lifecycle.ProcessLifecycleOwner;
|
||||
import androidx.localbroadcastmanager.content.LocalBroadcastManager;
|
||||
|
||||
public class StrongSwanApplication extends Application implements DefaultLifecycleObserver
|
||||
public class StrongSwanApplication extends Application
|
||||
{
|
||||
private static final String TAG = StrongSwanApplication.class.getSimpleName();
|
||||
|
||||
@ -80,6 +76,7 @@ public class StrongSwanApplication extends Application implements DefaultLifecyc
|
||||
public void onCreate()
|
||||
{
|
||||
super.onCreate();
|
||||
|
||||
StrongSwanApplication.mContext = getApplicationContext();
|
||||
StrongSwanApplication.mInstance = this;
|
||||
|
||||
@ -92,24 +89,12 @@ public class StrongSwanApplication extends Application implements DefaultLifecyc
|
||||
|
||||
mUserCertificateManager = new ManagedUserCertificateManager(mContext, mManagedConfigurationService, mDatabaseHelper);
|
||||
|
||||
ProcessLifecycleOwner.get().getLifecycle().addObserver(this);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onResume(@NonNull LifecycleOwner owner)
|
||||
{
|
||||
reloadManagedConfigurationAndNotifyListeners();
|
||||
|
||||
final IntentFilter restrictionsFilter = new IntentFilter(Intent.ACTION_APPLICATION_RESTRICTIONS_CHANGED);
|
||||
registerReceiver(mRestrictionsReceiver, restrictionsFilter);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void onPause(@NonNull LifecycleOwner owner)
|
||||
{
|
||||
unregisterReceiver(mRestrictionsReceiver);
|
||||
}
|
||||
|
||||
private void reloadManagedConfigurationAndNotifyListeners()
|
||||
{
|
||||
final Set<String> uuids = new HashSet<>(mManagedConfigurationService.getManagedProfiles().keySet());
|
||||
|
@ -1,5 +1,5 @@
|
||||
/*
|
||||
* Copyright (C) 2012-2015 Tobias Brunner
|
||||
* Copyright (C) 2012-2024 Tobias Brunner
|
||||
* Copyright (C) 2012 Giuliano Grassi
|
||||
* Copyright (C) 2012 Ralf Sager
|
||||
*
|
||||
@ -20,6 +20,8 @@ package org.strongswan.android.logic;
|
||||
|
||||
import android.util.Log;
|
||||
|
||||
import java.beans.PropertyChangeListener;
|
||||
import java.beans.PropertyChangeSupport;
|
||||
import java.security.KeyStore;
|
||||
import java.security.KeyStoreException;
|
||||
import java.security.cert.Certificate;
|
||||
@ -27,10 +29,9 @@ import java.security.cert.X509Certificate;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Enumeration;
|
||||
import java.util.Hashtable;
|
||||
import java.util.Observable;
|
||||
import java.util.concurrent.locks.ReentrantReadWriteLock;
|
||||
|
||||
public class TrustedCertificateManager extends Observable
|
||||
public class TrustedCertificateManager
|
||||
{
|
||||
private static final String TAG = TrustedCertificateManager.class.getSimpleName();
|
||||
private final ReentrantReadWriteLock mLock = new ReentrantReadWriteLock();
|
||||
@ -38,6 +39,7 @@ public class TrustedCertificateManager extends Observable
|
||||
private volatile boolean mReload;
|
||||
private boolean mLoaded;
|
||||
private final ArrayList<KeyStore> mKeyStores = new ArrayList<KeyStore>();
|
||||
private PropertyChangeSupport mObservers = new PropertyChangeSupport(this);
|
||||
|
||||
public enum TrustedCertificateSource
|
||||
{
|
||||
@ -98,6 +100,35 @@ public class TrustedCertificateManager extends Observable
|
||||
return Singleton.mInstance;
|
||||
}
|
||||
|
||||
/**
|
||||
* Add an observer for changes to the trusted certificate store. There will
|
||||
* be a "storeChanged" property "change" when anything in the store changed.
|
||||
*
|
||||
* @param observer observer to add
|
||||
*/
|
||||
public void addObserver(PropertyChangeListener observer)
|
||||
{
|
||||
mObservers.addPropertyChangeListener(observer);
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove an observer for changes to the trusted certificate store.
|
||||
*
|
||||
* @param observer observer to remove
|
||||
*/
|
||||
public void deleteObserver(PropertyChangeListener observer)
|
||||
{
|
||||
mObservers.removePropertyChangeListener(observer);
|
||||
}
|
||||
|
||||
/**
|
||||
* Use a fake property with a forced change to notify observers.
|
||||
*/
|
||||
private void notifyObservers()
|
||||
{
|
||||
mObservers.firePropertyChange("storeChanged", false, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Invalidates the current load state so that the next call to load()
|
||||
* will force a reload of the cached CA certificates.
|
||||
@ -110,7 +141,6 @@ public class TrustedCertificateManager extends Observable
|
||||
{
|
||||
Log.d(TAG, "Force reload of cached CA certificates on next load");
|
||||
this.mReload = true;
|
||||
this.setChanged();
|
||||
this.notifyObservers();
|
||||
return this;
|
||||
}
|
||||
@ -152,7 +182,6 @@ public class TrustedCertificateManager extends Observable
|
||||
this.mCACerts = certs;
|
||||
if (!this.mLoaded)
|
||||
{
|
||||
this.setChanged();
|
||||
this.notifyObservers();
|
||||
this.mLoaded = true;
|
||||
}
|
||||
|
RemediationInstructionFragment.java

@@ -16,6 +16,7 @@
 
 package org.strongswan.android.ui;
 
+import android.os.Build;
 import android.os.Bundle;
 import android.view.LayoutInflater;
 import android.view.View;
@@ -58,7 +59,14 @@ public class RemediationInstructionFragment extends ListFragment
 
         if (savedInstanceState != null)
         {
-            mInstruction = savedInstanceState.getParcelable(ARG_REMEDIATION_INSTRUCTION);
+            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.TIRAMISU)
+            {
+                mInstruction = getInstructionCompat(savedInstanceState);
+            }
+            else
+            {
+                mInstruction = savedInstanceState.getParcelable(ARG_REMEDIATION_INSTRUCTION, RemediationInstruction.class);
+            }
         }
         /* show dividers only between list items */
         getListView().setHeaderDividersEnabled(false);
@@ -85,7 +93,14 @@ public class RemediationInstructionFragment extends ListFragment
         Bundle args = getArguments();
         if (args != null)
         {
-            mInstruction = args.getParcelable(ARG_REMEDIATION_INSTRUCTION);
+            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.TIRAMISU)
+            {
+                mInstruction = getInstructionCompat(args);
+            }
+            else
+            {
+                mInstruction = args.getParcelable(ARG_REMEDIATION_INSTRUCTION, RemediationInstruction.class);
+            }
         }
         updateView(mInstruction);
     }
@@ -117,4 +132,10 @@ public class RemediationInstructionFragment extends ListFragment
             setListAdapter(null);
         }
     }
+
+    @SuppressWarnings("deprecation")
+    private static RemediationInstruction getInstructionCompat(Bundle bundle)
+    {
+        return bundle.getParcelable(ARG_REMEDIATION_INSTRUCTION);
+    }
 }
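The fragment and activity changes here all follow the same Android 13 pattern: `Bundle.getParcelable(String)` and `getParcelableArrayList(String)` are deprecated as of API 33 (`TIRAMISU`) in favour of type-safe overloads that take the expected `Class`, and those overloads only exist on API 33 and newer. The old calls are therefore kept behind a version check and isolated in small `@SuppressWarnings("deprecation")` helpers. A generic sketch of that pattern as a standalone utility is shown below; it is hypothetical and not part of this diff, which uses per-class helpers instead (`androidx.core.os.BundleCompat` offers similar helpers as a library alternative):

```java
import java.util.ArrayList;

import android.os.Build;
import android.os.Bundle;
import android.os.Parcelable;

/* Hypothetical utility mirroring the compat helpers in the diff; not part of the change set. */
public final class BundleCompatUtil
{
    private BundleCompatUtil() {}

    @SuppressWarnings("deprecation")
    public static <T extends Parcelable> T getParcelable(Bundle bundle, String key, Class<T> clazz)
    {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU)
        {   /* type-safe overload, available since API 33 */
            return bundle.getParcelable(key, clazz);
        }
        /* legacy overload, deprecated on API 33+ */
        return bundle.getParcelable(key);
    }

    @SuppressWarnings("deprecation")
    public static <T extends Parcelable> ArrayList<T> getParcelableArrayList(Bundle bundle, String key, Class<T> clazz)
    {
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU)
        {   /* type-safe overload, available since API 33 */
            return bundle.getParcelableArrayList(key, clazz);
        }
        /* legacy overload, deprecated on API 33+ */
        return bundle.getParcelableArrayList(key);
    }
}
```

Keeping the helpers next to the classes that use them, as the diff does, avoids introducing a shared utility dependency for what is a temporary compatibility shim.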
RemediationInstructionsActivity.java

@@ -43,7 +43,16 @@ public class RemediationInstructionsActivity extends AppCompatActivity implement
         if (frag != null)
         {   /* two-pane layout, update fragment */
             Bundle extras = getIntent().getExtras();
-            ArrayList<RemediationInstruction> list = extras.getParcelableArrayList(RemediationInstructionsFragment.EXTRA_REMEDIATION_INSTRUCTIONS);
+            ArrayList<RemediationInstruction> list = null;
+            if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.TIRAMISU)
+            {
+                list = RemediationInstructionsFragment.getInstructionsCompat(extras);
+            }
+            else
+            {
+                list = extras.getParcelableArrayList(RemediationInstructionsFragment.EXTRA_REMEDIATION_INSTRUCTIONS,
+                                                     RemediationInstruction.class);
+            }
             frag.updateView(list);
         }
         else
RemediationInstructionsFragment.java

@@ -17,6 +17,7 @@
 package org.strongswan.android.ui;
 
 import android.content.Context;
+import android.os.Build;
 import android.os.Bundle;
 import android.view.View;
 import android.widget.ListView;
@@ -55,7 +56,14 @@ public class RemediationInstructionsFragment extends ListFragment
 
         if (savedInstanceState != null)
         {
-            mInstructions = savedInstanceState.getParcelableArrayList(EXTRA_REMEDIATION_INSTRUCTIONS);
+            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.TIRAMISU)
+            {
+                mInstructions = getInstructionsCompat(savedInstanceState);
+            }
+            else
+            {
+                mInstructions = savedInstanceState.getParcelableArrayList(EXTRA_REMEDIATION_INSTRUCTIONS, RemediationInstruction.class);
+            }
             mCurrentPosition = savedInstanceState.getInt(KEY_POSITION);
         }
     }
@@ -93,7 +101,14 @@ public class RemediationInstructionsFragment extends ListFragment
         Bundle args = getArguments();
         if (mInstructions == null && args != null)
         {
-            mInstructions = args.getParcelableArrayList(EXTRA_REMEDIATION_INSTRUCTIONS);
+            if (Build.VERSION.SDK_INT < Build.VERSION_CODES.TIRAMISU)
+            {
+                mInstructions = getInstructionsCompat(args);
+            }
+            else
+            {
+                mInstructions = args.getParcelableArrayList(EXTRA_REMEDIATION_INSTRUCTIONS, RemediationInstruction.class);
+            }
         }
         updateView(mInstructions);
 
@@ -123,4 +138,10 @@ public class RemediationInstructionsFragment extends ListFragment
         mInstructions = instructions;
         mAdapter.setData(mInstructions);
     }
+
+    @SuppressWarnings("deprecation")
+    public static ArrayList<RemediationInstruction> getInstructionsCompat(Bundle bundle)
+    {
+        return bundle.getParcelableArrayList(RemediationInstructionsFragment.EXTRA_REMEDIATION_INSTRUCTIONS);
+    }
 }
Some files were not shown because too many files have changed in this diff.