Mirror of https://github.com/strongswan/strongswan.git (synced 2025-08-20 00:01:59 -04:00)

Compare commits
No commits in common. "master" and "5.9.14rc1" have entirely different histories.
In the hunks below, lines prefixed with "-" are from master and lines prefixed with "+" are from 5.9.14rc1.
.cirrus.yml (25 changed lines)

@@ -1,11 +1,11 @@
-freebsd_task:
+task:
 matrix:
-- name: FreeBSD 14.2
+- name: FreeBSD 14.0
 freebsd_instance:
-image_family: freebsd-14-2
+image_family: freebsd-14-0
-- name: FreeBSD 13.4
+- name: FreeBSD 13.2
 freebsd_instance:
-image_family: freebsd-13-4
+image_family: freebsd-13-2

 env:
 TESTS_REDUCED_KEYLENGTHS: yes
@@ -16,18 +16,3 @@ freebsd_task:

 install_script: ./scripts/test.sh deps
 script: ./scripts/test.sh
-
-alpine_task:
-container:
-image: alpine:latest
-
-env:
-TESTS_REDUCED_KEYLENGTHS: yes
-TESTS_NO_IPV6: yes
-LEAK_DETECTIVE: no
-MONOLITHIC: no
-TEST: alpine
-OS_NAME: alpine
-
-install_script: ./scripts/test.sh deps
-script: ./scripts/test.sh
(unnamed file)
@@ -1,3 +1,3 @@
 ignore:
-- "**/suites/"
+- "*/suites/*"
-- "**/tests/"
+- "*/tests/*"
.github/ISSUE_TEMPLATE/bug_report.md (vendored, 1 changed line)

@@ -2,7 +2,6 @@
 name: "🐛 Bug report"
 about: Report a reproducible bug or regression
 labels: bug, new
-type: Bug
 ---

 <!--
.github/ISSUE_TEMPLATE/feature_request.md (vendored, 1 changed line)

@@ -2,7 +2,6 @@
 name: Feature request
 about: Suggest an idea for this project
 labels: enhancement, new
-type: Feature
 ---

 <!--
.github/actions/default/action.yml (vendored, 3 changed lines)

@@ -5,6 +5,9 @@ runs:
 - name: "Install Dependencies"
 run: ./scripts/test.sh deps
 shell: bash
+- name: "Install Python Dependencies"
+run: ./scripts/test.sh pydeps
+shell: bash
 - name: "Build Dependencies"
 run: ./scripts/test.sh build-deps
 shell: bash
.github/codeql/cpp-queries/chunk_from_chars.ql (vendored, 20 changed lines)

@@ -10,7 +10,8 @@
  * @precision very-high
  */
 import cpp
-import semmle.code.cpp.dataflow.new.DataFlow
+import DataFlow::PathGraph
+import semmle.code.cpp.dataflow.DataFlow

 class ChunkFromChars extends Expr {
 ChunkFromChars() {
@@ -22,30 +23,29 @@ class ChunkFromChars extends Expr {
 }
 }

-module ChunkFromCharsConfig implements DataFlow::ConfigSig {
+class ChunkFromCharsUsage extends DataFlow::Configuration {
-predicate isSource(DataFlow::Node source) {
+ChunkFromCharsUsage() { this = "ChunkFromCharsUsage" }

+override predicate isSource(DataFlow::Node source) {
 source.asExpr() instanceof ChunkFromChars
 }

-predicate isSink(DataFlow::Node sink) {
+override predicate isSink(DataFlow::Node sink) {
 exists(sink.asExpr())
 }

-predicate isBarrierOut(DataFlow::Node node) {
+override predicate isBarrierOut(DataFlow::Node node) {
 /* don't track beyond function calls */
 exists(FunctionCall fc | node.asExpr().getParent*() = fc)
 }
 }

-module ChunkFromCharsFlow = DataFlow::Global<ChunkFromCharsConfig>;
-import ChunkFromCharsFlow::PathGraph

 BlockStmt enclosingBlock(BlockStmt b) {
 result = b.getEnclosingBlock()
 }

-from ChunkFromCharsFlow::PathNode source, ChunkFromCharsFlow::PathNode sink
+from ChunkFromCharsUsage usage, DataFlow::PathNode source, DataFlow::PathNode sink
 where
-ChunkFromCharsFlow::flowPath(source, sink)
+usage.hasFlowPath(source, sink)
 and not source.getNode().asExpr().getEnclosingBlock() = enclosingBlock*(sink.getNode().asExpr().getEnclosingBlock())
 select source, source, sink, "Invalid use of chunk_from_chars() result in sibling/parent block."
.github/workflows/android.yml (vendored, 4 changed lines)

@@ -18,7 +18,7 @@ jobs:
 - id: skip-check
 uses: fkirc/skip-duplicate-actions@master
 with:
-concurrent_skipping: 'same_content_newer'
+concurrent_skipping: 'same_content'

 android:
 needs: pre-check
@@ -36,7 +36,7 @@ jobs:
 run: |
 NDK_VERSION=$(grep "ndkVersion" src/frontends/android/app/build.gradle | sed -e 's/.*"\(.*\)"/\1/')
 echo Using NDK ${NDK_VERSION}
-yes | ${ANDROID_HOME}/cmdline-tools/latest/bin/sdkmanager --install "ndk;${NDK_VERSION}"
+yes | sudo ${ANDROID_HOME}/cmdline-tools/latest/bin/sdkmanager --install "ndk;${NDK_VERSION}"
 echo "ANDROID_NDK_ROOT=${ANDROID_HOME}/ndk/${NDK_VERSION}" >> "$GITHUB_OUTPUT"
 - uses: actions/cache@v4
 with:
.github/workflows/codeql.yml (vendored, 2 changed lines)

@@ -19,7 +19,7 @@ jobs:
 - id: skip-check
 uses: fkirc/skip-duplicate-actions@master
 with:
-concurrent_skipping: 'same_content_newer'
+concurrent_skipping: 'same_content'

 analyze:
 needs: pre-check
.github/workflows/linux.yml (vendored, 44 changed lines)

@@ -21,12 +21,12 @@ jobs:
 - id: skip-check
 uses: fkirc/skip-duplicate-actions@master
 with:
-concurrent_skipping: 'same_content_newer'
+concurrent_skipping: 'same_content'

 latest:
 needs: pre-check
 if: ${{ needs.pre-check.outputs.should_skip != 'true' }}
-runs-on: ${{ matrix.os || 'ubuntu-latest' }}
+runs-on: ubuntu-latest
 strategy:
 matrix:
 test: [ all, default, printf-builtin ]
@@ -48,9 +48,6 @@ jobs:
 - test: no-dbg
 - test: no-dbg
 compiler: clang
-- test: no-testable-ke
-- test: no-testable-ke
-compiler: clang
 - test: fuzzing
 compiler: clang
 monolithic: yes
@@ -79,13 +76,7 @@ jobs:
 - uses: ./.github/actions/default
 - run: ccache -s
 - if: ${{ success() && matrix.test == 'coverage' }}
-uses: codecov/codecov-action@v4
+run: bash <(curl -s https://codecov.io/bash)
-with:
-disable_search: true
-fail_ci_if_error: true
-file: coverage/coverage.cleaned.info
-token: ${{ secrets.CODECOV_TOKEN }}
-verbose: true
 - if: ${{ failure() }}
 uses: actions/upload-artifact@v4
 with:
@@ -93,24 +84,24 @@ jobs:
 path: config.log
 retention-days: 5

-crypto:
+crypto-plugins:
 needs: pre-check
 if: ${{ needs.pre-check.outputs.should_skip != 'true' }}
 runs-on: ${{ matrix.os }}
 strategy:
 matrix:
+os: [ ubuntu-latest, ubuntu-20.04 ]
 test: [ botan, wolfssl, openssl, openssl-3, openssl-awslc, gcrypt ]
-os: [ ubuntu-latest, ubuntu-22.04 ]
 leak-detective: [ no, yes ]
 exclude:
-# test custom-built libs only on the latest platform
+# test custom-built libs only on one platform
-- os: ubuntu-22.04
+- os: ubuntu-20.04
 test: botan
-- os: ubuntu-22.04
+- os: ubuntu-20.04
 test: wolfssl
-- os: ubuntu-22.04
+- os: ubuntu-20.04
 test: openssl-3
-- os: ubuntu-22.04
+- os: ubuntu-20.04
 test: openssl-awslc
 env:
 LEAK_DETECTIVE: ${{ matrix.leak-detective || 'no' }}
@@ -135,12 +126,6 @@ jobs:
 echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV
 ccache -z
 - uses: ./.github/actions/default
-- name: Test Vectors (detailed)
-env:
-TESTS_SUITES: vectors
-TESTS_VERBOSITY: 1
-run:
-./scripts/test.sh
 - run: ccache -s
 - if: ${{ failure() }}
 uses: actions/upload-artifact@v4
@@ -155,7 +140,7 @@ jobs:
 runs-on: ${{ matrix.os }}
 strategy:
 matrix:
-os: [ ubuntu-22.04 ]
+os: [ ubuntu-20.04 ]
 test: [ all, nm ]
 compiler: [ gcc, clang ]
 exclude:
@@ -165,14 +150,13 @@ jobs:
 LEAK_DETECTIVE: ${{ matrix.leak-detective || 'no' }}
 CC: ${{ matrix.compiler || 'gcc' }}
 TEST: ${{ matrix.test }}
+# LSan causes spurious SIGSEGV after tests due to DTLS handling by glibc
+ASAN_OPTIONS: intercept_tls_get_addr=0
 steps:
 - uses: actions/checkout@v4
 - uses: actions/cache@v4
 with:
-# path is different on newer systems
+path: ~/.ccache
-path: |
-~/.cache/ccache
-~/.ccache
 key: ccache-${{ matrix.os }}-${{ env.CC }}-${{ matrix.test }}-${{ github.sha }}
 restore-keys: |
 ccache-${{ matrix.os }}-${{ env.CC }}-${{ matrix.test }}-
.github/workflows/macos.yml (vendored, 2 changed lines)

@@ -18,7 +18,7 @@ jobs:
 - id: skip-check
 uses: fkirc/skip-duplicate-actions@master
 with:
-concurrent_skipping: 'same_content_newer'
+concurrent_skipping: 'same_content'

 macos:
 strategy:
.github/workflows/sonarcloud.yml (vendored, 32 changed lines)

@@ -17,7 +17,7 @@ jobs:
 - id: skip-check
 uses: fkirc/skip-duplicate-actions@master
 with:
-concurrent_skipping: 'same_content_newer'
+concurrent_skipping: 'same_content'

 sonarcloud:
 needs: pre-check
@@ -33,6 +33,7 @@ jobs:
 with:
 path: |
 ~/.cache/ccache
+~/.sonar-cache
 key: ccache-sonarcloud-${{ github.sha }}
 restore-keys: |
 ccache-sonarcloud-
@@ -40,17 +41,24 @@ jobs:
 sudo apt-get install -qq ccache
 echo "PATH=/usr/lib/ccache:$PATH" >> $GITHUB_ENV
 ccache -z
-- uses: SonarSource/sonarqube-scan-action/install-build-wrapper@v4
+# using SonarSource/sonarcloud-github-action is currently not recommended
-- run: |
+# for C builds, so we follow the "any CI" instructions
-echo "BUILD_WRAPPER_OUT_DIR=$HOME/bw-output" >> $GITHUB_ENV
+- name: Install sonar-scanner
-- uses: ./.github/actions/default
-- uses: SonarSource/sonarqube-scan-action@v4
 env:
+SONAR_SCANNER_VERSION: 5.0.1.3006
+run: |
+export SONAR_SCANNER_HOME=$HOME/.sonar/sonar-scanner-$SONAR_SCANNER_VERSION-linux
+curl --create-dirs -sSLo $HOME/.sonar/sonar-scanner.zip https://binaries.sonarsource.com/Distribution/sonar-scanner-cli/sonar-scanner-cli-$SONAR_SCANNER_VERSION-linux.zip
+unzip -o $HOME/.sonar/sonar-scanner.zip -d $HOME/.sonar/
+echo "SONAR_SCANNER_OPTS=-server" >> $GITHUB_ENV
+curl --create-dirs -sSLo $HOME/.sonar/build-wrapper-linux-x86.zip https://sonarcloud.io/static/cpp/build-wrapper-linux-x86.zip
+unzip -o $HOME/.sonar/build-wrapper-linux-x86.zip -d $HOME/.sonar/
+echo "PATH=$HOME/.sonar/build-wrapper-linux-x86:$SONAR_SCANNER_HOME/bin:$PATH" >> $GITHUB_ENV
+- env:
+GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+BUILD_NUMBER: ${{ github.run_id }}
 SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
-with:
+SONAR_PROJECT: ${{ secrets.SONAR_PROJECT }}
-args: >
+SONAR_ORGANIZATION: ${{ secrets.SONAR_ORGANIZATION }}
--Dsonar.projectKey=${{ secrets.SONAR_PROJECT }}
+uses: ./.github/actions/default
--Dsonar.organization=${{ secrets.SONAR_ORGANIZATION }}
--Dsonar.cfamily.threads=2
--Dsonar.cfamily.compile-commands=${{ env.BUILD_WRAPPER_OUT_DIR }}/compile_commands.json
 - run: ccache -s
.github/workflows/tkm.yml (vendored, 2 changed lines)

@@ -18,7 +18,7 @@ jobs:
 - id: skip-check
 uses: fkirc/skip-duplicate-actions@master
 with:
-concurrent_skipping: 'same_content_newer'
+concurrent_skipping: 'same_content'

 tkm:
 needs: pre-check
.github/workflows/windows.yml (vendored, 2 changed lines)

@@ -21,7 +21,7 @@ jobs:
 - id: skip-check
 uses: fkirc/skip-duplicate-actions@master
 with:
-concurrent_skipping: 'same_content_newer'
+concurrent_skipping: 'same_content'

 cross-compile:
 needs: pre-check
.gitignore (vendored, 2 changed lines)

@@ -27,7 +27,6 @@ libtool
 y.tab.[ch]
 lex.yy.c
 *keywords.c
-!proposal_keywords.c
 plugin_constructors.c
 Doxyfile
 apidoc/
@@ -55,4 +54,3 @@ coverage/
 /*.includes
 test-driver
 nbproject/
-*.[si]
(unnamed file)
@@ -1,3 +1,3 @@
-Please refer to the [developer documentation](https://docs.strongswan.org/docs/latest/devs/devs.html)
+Please refer to the [developer documentation](https://docs.strongswan.org/docs/5.9/devs/devs.html)
 in our documentation for details regarding **code style** and
-[**contribution requirements**](https://docs.strongswan.org/docs/latest/devs/contributions.html).
+[**contribution requirements**](https://docs.strongswan.org/docs/5.9/devs/contributions.html).
Doxyfile.in (1208 changed lines): file diff suppressed because it is too large.
HACKING (1 changed line)

@@ -14,6 +14,7 @@ the code, you need the following tools:
 - autoconf
 - libtool
 - pkg-config
+- gettext
 - perl
 - python
 - lex/flex
INSTALL (2 changed lines)

@@ -144,4 +144,4 @@ Contents

 For a more up-to-date list of recommended modules refer to:

-* https://docs.strongswan.org/docs/latest/install/kernelModules.html
+* https://docs.strongswan.org/docs/5.9/install/kernelModules.html
(unnamed file)
@@ -65,11 +65,10 @@ cov-reset: cov-reset-common
 cov-report:
 @mkdir $(top_builddir)/coverage
 lcov -c -o $(top_builddir)/coverage/coverage.info -d $(top_builddir) \
---rc branch_coverage=1
+--rc lcov_branch_coverage=1
 lcov -r $(top_builddir)/coverage/coverage.info '*/tests/*' '*/suites/*' '/usr*' \
-'*proposal_keywords_static.*' \
 -o $(abs_top_builddir)/coverage/coverage.cleaned.info \
---rc branch_coverage=1
+--rc lcov_branch_coverage=1
 genhtml --num-spaces 4 --legend --branch-coverage --ignore-errors source \
 -t "$(PACKAGE_STRING)" \
 -o $(top_builddir)/coverage/html \
NEWS (138 changed lines)

@@ -1,143 +1,13 @@
-strongswan-6.0.2
-----------------
-
-- Support for per-CPU SAs (RFC 9611) has been added (Linux 6.13+).
-
-- Basic support for AGGFRAG mode (RFC 9347) has been added (Linux 6.14+).
-
-- POSIX regular expressions can be used to match remote identities.
-
-- Switching configs based on EAP-Identities is supported. Setting
-`remote.eap_id` now always initiates an EAP-Identity exchange.
-
-- On Linux, sequence numbers from acquires are used when installing SAs. This
-allows handling narrowing properly.
-
-- During rekeying, the narrowed traffic selectors are now proposed instead of
-the configured ones.
-
-- The default AH/ESP proposals contain all supported key exchange methods plus
-`none` to make PFS optional and accept proposals of older peers.
-
-- GRO for ESP in enabled for NAT-T UDP sockets, which can improve performance
-if the esp4|6_offload modules are loaded.
-
-- charon-nm sets the VPN connection as persistent, preventing NetworkManager
-from tearing down the connection if the network connectivity changes.
-
-- ML-KEM is supported via OpenSSL 3.5+.
-
-- The wolfssl plugin is now compatible to wolfSSL's FIPS module.
-
-- The libsoup plugin has been migrated to libsoup 3, libsoup 2 is not supported
-anymore.
-
-- The long defunct uci plugin has been removed.
-
-- Log messages by watcher_t are now logged in a separate log group (`wch`).
-
-
-strongswan-6.0.1
-----------------
-
-- The ha plugin supports IKE and Child SAs with multiple key exchanges.
-Incomplete IKE_SAs are now destroyed during a failover.
-
-- The new `interface_receive` option for the dhcp plugin allows binding the
-receive socket to a different interface than the send socket. Also fixed a
-regression if the DHCP server is running on the same host.
-
-- The new `source` option for the eap-radius plugin allows sending RADIUS
-messages from a specific IP address.
-
-- Self-signed root CAs without policies are now excluded from policy validation.
-
-- Inbound traffic on IPsec SAs is now ignored when sending DPDs unless
-UDP-encapsulation is used.
-
-- Send IKE_SA_INIT from NAT-T socket if not connecting to port 500.
-
-- Local traffic selectors can be configured for charon-nm. Its default
-retransmission settings have been set to those of the Android app.
-
-- The vici Python wheel is now built via `build` frontend instead of calling
-setup.py directly if --enable-python-wheels is used (the option to build eggs
-has been removed). There is no option to automatically install the wheel (use
-pip instead) and the --enable-python-eggs-install option has been removed.
-
-
-strongswan-6.0.0
-----------------
-
-- Support of multiple post-quantum (and classic) key exchanges using the
-IKE_INTERMEDIATE exchange (RFC 9242) and the Additional Key Exchange
-transform types 1..7 (RFC 9370).
-
-- ML-KEM is provided by the botan, wolfssl, openssl (only via AWS-LC) and the
-new ml plugins.
-
-- Handling of CHILD_SA rekey collisions has been improved, which makes CHILD_SAs
-properly trackable via chiled_rekey() hook.
-
-- The behavior when reloading or unloading connections that include `start` in
-their `start_action` has been improved.
-
-- The default identity is now the subject DN instead of the IP address if a
-certificate is available.
-
-- The file logger supports logging as JSON objects and can add timestamps
-in microseconds.
-
-- The cert-enroll script now supports three generations of CA certificates.
-
-- charon-nm uses a different routing table than the regular IKE daemon to avoid
-conflicts if both are running.
-
-- AF_VSOCK sockets are supported on Linux to communicate with a daemon that runs
-in a VM.
-
-- TUN devices can properly handle IPv6 addresses.
-
-- For compatibility with older SCEP implementations, challenge passwords in
-PKCS#10 containers are again encoded as PrintableString if possible.
-
-- The legacy stroke plugin is no longer enabled by default.
-
-- The openssl plugin is now enabled by default, while the following crypto
-plugins are no longer enabled by default: aes, curve25519, des, fips-prf, gmp,
-hmac, md5, pkcs12, rc2, sha1, sha2.
-
-- The following deprecated plugins have been removed: bliss, newhope, ntru.
-
-- charon.make_before_break is now enabled by default.
-
-
 strongswan-5.9.14
 -----------------

-- Support for the IKEv2 OCSP extensions (RFC 4806) has been added, which allows
-peers to request and send OCSP responses directly in IKEv2.
-
-- Validation of X.509 name constraints in the constraints plugin has been
-refactored to align with RFC 5280.
-
-- The dhcp plugin has been ported to FreeBSD/macOS.
-
-- The openssl plugin is now compatible with AWS-LC.
-
-- Overflows of unique identifiers (e.g. Netlink sequence numbers or reqids) are
-now handled gracefully.
-
 - Updated the pkcs11.h header based on the latest OpenSC version in order to
-include new algorithm and struct definitions for the pkcs11 plugin.
+include new algorithm and struct definitions for the pkcs11 plugin .
 Added support for PSS padding in smartcard-based RSA signatures using either
 on-chip or external data hashing.

 - Added keyid and certid handles in the pki --ocsp command so that keys and/or
-certificates can be stored on a smartcard or in a TPM 2.0 device.
+certificates can stored on a smartcard or in a TPM 2.0 device.

-- Fail SA installation on Linux if replay protection is disabled while ESN is
-enabled, which the kernel currently doesn't support.
-

 strongswan-5.9.13
@@ -476,7 +346,7 @@ strongswan-5.9.4
 salt lengths.
 This vulnerability has been registered as CVE-2021-41990.

-- Fixed a denial-of-service vulnerability in the in-memory certificate cache
+- Fixed a denial-of-service vulnerabililty in the in-memory certificate cache
 if certificates are replaced and a very large random value caused an integer
 overflow.
 This vulnerability has been registered as CVE-2021-41991.
@@ -1888,7 +1758,7 @@ strongswan-5.0.3
 PT-TLS (RFC 6876), a Posture Transport Protocol over TLS.

 - The charon systime-fix plugin can disable certificate lifetime checks on
-embedded systems if the system time is obviously out of sync after boot-up.
+embedded systems if the system time is obviously out of sync after bootup.
 Certificates lifetimes get checked once the system time gets sane, closing
 or reauthenticating connections using expired certificates.
(unnamed file)
@@ -566,7 +566,7 @@ to generate a traditional 3072 bit RSA key and store it in binary DER format.
 As an alternative a **TPM 2.0** *Trusted Platform Module* available on every
 recent Intel platform could be used as a virtual smartcard to securely store an
 RSA or ECDSA private key. For details, refer to the TPM 2.0
-[HOWTO](https://docs.strongswan.org/docs/latest/tpm/tpm2.html).
+[HOWTO](https://docs.strongswan.org/docs/5.9/tpm/tpm2.html).

 In a next step the command

(unnamed file)
@@ -16,7 +16,6 @@ options = \
 options/charon-systemd.opt \
 options/imcv.opt \
 options/imv_policy_manager.opt \
-options/iptfs.opt \
 options/manager.opt \
 options/medsrv.opt \
 options/pki.opt \
@@ -32,6 +31,7 @@ plugins = \
 plugins/android_log.opt \
 plugins/attr.opt \
 plugins/attr-sql.opt \
+plugins/bliss.opt \
 plugins/botan.opt \
 plugins/bypass-lan.opt \
 plugins/certexpire.opt \
@@ -77,6 +77,7 @@ plugins = \
 plugins/kernel-pfroute.opt \
 plugins/load-tester.opt \
 plugins/lookip.opt \
+plugins/ntru.opt \
 plugins/openssl.opt \
 plugins/openxpki.opt \
 plugins/osx-attr.opt \
(unnamed file)
@@ -55,6 +55,14 @@ man pages) the following format can be used:

 full.section.name.include files/to/include
 Description of this include statement

+Dots in section/option names may be escaped with a backslash. For instance,
+with the following section description
+
+charon.filelog./var/log/daemon\.log {}
+Section to define logging into /var/log/daemon.log
+
+/var/log/daemon.log will be the name of the last section.
 """

 import sys
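The "+" side above splits option names on unescaped dots with re.split(r'(?<!\\)\.', name) and then unescapes the remaining dots. A minimal, self-contained sketch of that splitting behaviour (the helper name split_name and the sample value are illustrative, not part of the repository):

    import re

    def split_name(name):
        """Split a full option name on unescaped dots, then unescape '\\.'."""
        return [part.replace('\\.', '.') for part in re.split(r'(?<!\\)\.', name)]

    # The escaped dot in 'daemon\.log' stays inside one path component.
    print(split_name(r'charon.filelog./var/log/daemon\.log'))
    # ['charon', 'filelog', '/var/log/daemon.log']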
@@ -66,10 +74,10 @@ from functools import cmp_to_key, total_ordering
 @total_ordering
 class ConfigOption:
 """Representing a configuration option or described section in strongswan.conf"""
-def __init__(self, fullname, default = None, section = False, commented = False, include = False):
+def __init__(self, path, default = None, section = False, commented = False, include = False):
-self.path = fullname.split('.')
+self.path = path
-self.name = self.path[-1]
+self.name = path[-1]
-self.fullname = fullname
+self.fullname = '.'.join(path)
 self.default = default
 self.section = section
 self.commented = commented
@@ -133,7 +141,8 @@ class Parser:
 if m:
 if self.__current:
 self.__add_option(self.__current)
-self.__current = ConfigOption(m.group('name'), m.group('default'),
+path = self.__split_name(m.group('name'))
+self.__current = ConfigOption(path, m.group('default'),
 commented = not m.group('assign'))
 return
 # section definition
@@ -141,7 +150,8 @@ class Parser:
 if m:
 if self.__current:
 self.__add_option(self.__current)
-self.__current = ConfigOption(m.group('name'), section = True,
+path = self.__split_name(m.group('name'))
+self.__current = ConfigOption(path, section = True,
 commented = m.group('comment'))
 return
 # include definition
@@ -149,7 +159,8 @@ class Parser:
 if m:
 if self.__current:
 self.__add_option(self.__current)
-self.__current = ConfigOption(m.group('name'), m.group('pattern'), include = True)
+path = self.__split_name(m.group('name'))
+self.__current = ConfigOption(path, m.group('pattern'), include = True)
 return
 # paragraph separator
 m = re.match(r'^\s*$', line)
@@ -160,6 +171,10 @@ class Parser:
 if m and self.__current:
 self.__current.add(m.group('text'))

+def __split_name(self, name):
+"""Split the given full name in a list of section/option names"""
+return [x.replace('\.', '.') for x in re.split(r'(?<!\\)\.', name)]
+
 def __add_option(self, option):
 """Adds the given option to the abstract storage"""
 option.desc = [desc for desc in option.desc if len(desc)]
@@ -179,14 +194,12 @@ class Parser:
 """Searches/Creates the option (section) based on a list of section names"""
 option = None
 options = self.options
-fullname = ""
+for i, name in enumerate(path, 1):
-for name in path:
-fullname += '.' + name if len(fullname) else name
 option = next((x for x in options if x.name == name and x.section), None)
 if not option:
 if not create:
 break
-option = ConfigOption(fullname, section = True)
+option = ConfigOption(path[:i], section = True)
 options.append(option)
 if self.sort:
 options.sort()
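In the loop on the "+" side, enumerate(path, 1) pairs each name with a 1-based index so that path[:i] is the list of section names up to and including the current one; that prefix list is what the intermediate ConfigOption(..., section = True) objects receive. A small stand-alone sketch (the sample path is illustrative):

    path = ['charon', 'filelog', '/var/log/daemon.log']
    for i, name in enumerate(path, 1):
        # path[:i] is the prefix ending at the current name, i.e. the parent
        # sections that may need to be created before this option.
        print(name, '->', path[:i])
    # charon -> ['charon']
    # filelog -> ['charon', 'filelog']
    # /var/log/daemon.log -> ['charon', 'filelog', '/var/log/daemon.log']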
@@ -195,7 +208,7 @@ class Parser:

 def get_option(self, name):
 """Retrieves the option with the given name"""
-return self.__get_option(name.split('.'))
+return self.__get_option(self.__split_name(name))

 class TagReplacer:
 """Replaces formatting tags in text"""
@@ -241,7 +254,6 @@ class GroffTagReplacer(TagReplacer):
 if not punct:
 punct = ''
 text = re.sub(r'[\r\n\t]', ' ', m.group('text'))
-text = re.sub(r'"', '""', text)
 return '{0}.R{1} "{2}" "{3}" "{4}"\n'.format(nl, format, brack, text, punct)
 return replacer

@@ -306,8 +318,7 @@ class ManFormatter:
 def __groffize(self, text):
 """Encode text as groff text"""
 text = self.__tags.replace(text)
-text = re.sub(r'\\(?!-)', '\\[rs]', text)
+text = re.sub(r'(?<!\\)-', r'\\-', text)
-text = re.sub(r'(?<!\\)-', '\\-', text)
 # remove any leading whitespace
 return re.sub(r'^\s+', '', text, flags = re.MULTILINE)

(unnamed file)
@@ -26,18 +26,8 @@ charon.filelog.<name>.flush_line = no
 Enabling this option disables block buffering and enables line buffering.

 charon.filelog.<name>.ike_name = no
-Add the connection name and a unique numerical identifier for the current
+Prefix each log entry with the connection name and a unique numerical
-IKE_SA to each log entry if available.
+identifier for each IKE_SA.

-charon.filelog.<name>.json = no
-If enabled, each log entry is written to the file as a JSON object.
-
-Enables writing each log entry as a JSON object to the file. The properties
-are "time" (if `time_format` is set), "thread", "group", "level" and "msg".
-Newlines, double quotes and backslashes are escaped in the latter. If
-`ike_name` is enabled, "ikesa-uniqueid" and "ikesa-name" are added to the
-object if available. The `log_level` option does not apply if this is
-enabled.
-
 charon.filelog.<name>.log_level = no
 Add the log level of each message after the subsystem (e.g. [IKE2]).
@@ -46,10 +36,9 @@ charon.filelog.<name>.time_format
 Prefix each log entry with a timestamp. The option accepts a format string
 as passed to **strftime**(3).

-charon.filelog.<name>.time_precision =
+charon.filelog.<name>.time_add_ms = no
-Add the milliseconds (_ms_) or microseconds (_us_) within the current second
+Adds the milliseconds within the current second after the timestamp
-after the timestamp (separated by a dot, so _time_format_ should end
+(separated by a dot, so _time_format_ should end with %S or %T).
-with %S or %T). By default, nothing is added.

 charon.syslog {}
 Section to define syslog loggers, see LOGGER CONFIGURATION in
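The `json` option on the "-" side above describes log entries written as JSON objects with "time", "thread", "group", "level" and "msg" properties (plus "ikesa-name"/"ikesa-uniqueid" when `ike_name` is set). A minimal sketch of reading such output from Python, assuming one JSON object per line and a hypothetical log path:

    import json

    # Hypothetical log file written by a filelog section with json enabled.
    with open('/var/log/charon.json.log') as log:
        for line in log:
            entry = json.loads(line)
            print(entry.get('time'), entry.get('group'), entry.get('level'), entry.get('msg'))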
(unnamed file)
@@ -1,55 +1,6 @@
-charon-nm {}
-Section with settings specific to the NetworkManager backend `charon-nm`.
-Settings from the `charon` section are not inherited, but many can be used
-here as well. Defaults for some settings are chosen very deliberately and
-should only be changed in case of conflicts.
-
 charon-nm.ca_dir = <default>
 Directory from which to load CA certificates if no certificate is
 configured.

-charon-nm.install_virtual_ip_on = lo
-Interface on which virtual IP addresses are installed. Note that NM
-also installs the virtual IPs on the XFRM interface.
-
 charon-nm.mtu = 1400
 MTU for XFRM interfaces created by the NM plugin.

-charon-nm.port = 0
-Source port when sending packets to port 500. Defaults to an ephemeral
-port. May be set to 500 if firewall rules require a static port.
-
-charon-nm.port_nat_t = 0
-Source port when sending packets to port 4500 or a custom server port.
-Defaults to an ephemeral port. May be set to e.g. 4500 if firewall rules
-require a static port.
-
-charon-nm.retransmit_base = 1.4
-Base to use for calculating exponential back off, see IKEv2 RETRANSMISSION
-in **strongswan.conf**(5). Default retransmission settings for charon-nm are
-deliberately lower to fail and possibly reestablish SAs more quickly.
-
-charon-nm.retransmit_timeout = 2.0
-Timeout in seconds before sending first retransmit.
-
-charon-nm.retransmit_tries = 3
-Number of times to retransmit a packet before giving up.
-
-charon-nm.routing_table = 210
-Table where routes via XFRM interface are installed. Should be different
-than the table used for the regular IKE daemon due to the mark.
-
-charon-nm.routing_table_prio = 210
-Priority of the routing table. Higher than the default priority used for the
-regular IKE daemon.
-
-charon-nm.plugins.kernel-netlink.fwmark = !210
-Make packets with this mark ignore the routing table. Must be the same mark
-set in charon-nm.plugins.socket-default.fwmark.
-
-charon-nm.plugins.socket-default.fwmark = 210
-Mark applied to IKE and ESP packets to ignore the routing table and avoid
-routing loops when using XFRM interfaces.
-
-charon-nm.syslog.daemon.default = 1
-Default to logging via syslog's daemon facility on level 1.
(unnamed file)
@@ -154,16 +154,8 @@ charon.fragment_size = 1280
 Maximum size (complete IP datagram size in bytes) of a sent IKE fragment
 when using proprietary IKEv1 or standardized IKEv2 fragmentation, defaults
 to 1280 (use 0 for address family specific default values, which uses a
-lower value for IPv4). Unless overridden, this limit is used for both IPv4
+lower value for IPv4). If specified this limit is used for both IPv4 and
-and IPv6 if specified.
+IPv6.

-charon.fragment_size_v4 = charon.fragment_size
-Maximum size (complete IPv4 datagram size in bytes) of a sent IKE fragment
-when using proprietary IKEv1 or standardized IKEv2 fragmentation.
-
-charon.fragment_size_v6 = charon.fragment_size
-Maximum size (complete IPv6 datagram size in bytes) of a sent IKE fragment
-when using proprietary IKEv1 or standardized IKEv2 fragmentation.
-
 charon.group
 Name of the group the daemon changes to after startup.
@@ -291,7 +283,7 @@ charon.max_ikev1_exchanges = 3
 charon.max_packet = 10000
 Maximum packet size accepted by charon.

-charon.make_before_break = yes
+charon.make_before_break = no
 Initiate IKEv2 reauthentication with a make-before-break scheme.

 Initiate IKEv2 reauthentication with a make-before-break instead of a
(unnamed file)
@@ -1,38 +0,0 @@
-charon.iptfs {}
-Global settings for IP-TFS (RFC 9347). The Linux kernel supports this mode
-since 6.14. However, it currently only supports aggregation/fragmentation of
-tunneled IP packets in ESP/AGGFRAG packets. It doesn't yet support other
-IP-TFS features like sending packets at a constant rate or congestion control.
-
-charon.iptfs.drop_time = 1000000
-Time in microseconds to wait for out-of-order packets when processing
-inbound traffic.
-
-charon.iptfs.reorder_window = 3
-Number of packets that may arrive out of order when processing inbound
-traffic.
-
-charon.iptfs.init_delay = 0
-Time in microseconds to wait for subsequent packets to aggregate together
-when sending outbound traffic. Only relevant if no packets are already
-queued to be sent.
-
-charon.iptfs.max_queue_size = 1048576
-Maximum number of bytes allowed to be queued for sending on the tunnel
-(default 1 MiB). If the queue is full, packets are dropped.
-
-charon.iptfs.packet_size = 0
-Maximum outer packet size (layer 3) when sending packets. The default of 0
-will use the PMTU as packet size. Note that the kernel currently doesn't
-pad smaller packets.
-
-charon.iptfs.accept_fragments = yes
-Whether fragments of inner packets across multiple AGGFRAG payloads are
-accepted. This is an IKEv2 option, so if the peer doesn't adhere to this
-request and still sends such fragments, they will be processed by the
-kernel.
-
-charon.iptfs.dont_frag = no
-Force disabling fragmenting inner packets across multiple AGGFRAG payloads
-when sending outbound traffic (fragmentation is automatically disabled if
-the peer indicates that it doesn't support handling such packets).
conf/plugins/bliss.opt (2 changed lines)

@@ -0,0 +1,2 @@
+charon.plugins.bliss.use_bliss_b = yes
+Use the enhanced BLISS-B key generation and signature algorithm.
|
@ -36,13 +36,3 @@ charon.plugins.dhcp.interface
|
|||||||
Interface name the plugin uses for address allocation. The default is to
|
Interface name the plugin uses for address allocation. The default is to
|
||||||
bind to any (0.0.0.0) and let the system decide which way to route the
|
bind to any (0.0.0.0) and let the system decide which way to route the
|
||||||
packets to the DHCP server.
|
packets to the DHCP server.
|
||||||
|
|
||||||
charon.plugins.dhcp.interface_receive = charon.plugins.dhcp.interface
|
|
||||||
Interface name the plugin uses to bind its receive socket.
|
|
||||||
|
|
||||||
Interface name the plugin uses to bind its receive socket. The default is
|
|
||||||
to use the same interface as the send socket. Set it to the empty string
|
|
||||||
to avoid binding the receive socket to any interface while the send socket
|
|
||||||
is bound to one. If the server runs on the same host and the send socket is
|
|
||||||
bound to an interface, it might be necessary to set this to `lo` or the
|
|
||||||
empty string.
|
|
||||||
|
(unnamed file)
@@ -84,9 +84,6 @@ charon.plugins.eap-radius.secret =
 charon.plugins.eap-radius.server =
 IP/Hostname of RADIUS server.

-charon.plugins.eap-radius.source =
-Optional specific source IP to use.
-
 charon.plugins.eap-radius.retransmit_base = 1.4
 Base to use for calculating exponential back off.

@@ -99,12 +96,12 @@ charon.plugins.eap-radius.retransmit_tries = 4
 charon.plugins.eap-radius.servers {}
 Section to specify multiple RADIUS servers.

-Section to specify multiple RADIUS servers. The **source**,
+Section to specify multiple RADIUS servers. The **nas_identifier**,
-**nas_identifier**, **secret**, **sockets** and **port** (or **auth_port**)
+**secret**, **sockets** and **port** (or **auth_port**) options can be
-options can be specified for each server. A server's IP/Hostname can be
+specified for each server. A server's IP/Hostname can be configured using
-configured using the **address** option. The **acct_port** [1813] option can
+the **address** option. The **acct_port** [1813] option can be used to
-be used to specify the port used for RADIUS accounting. For each RADIUS
+specify the port used for RADIUS accounting. For each RADIUS server a
-server a priority can be specified using the **preference** [0] option. The
+priority can be specified using the **preference** [0] option. The
 retransmission time for each server can set set using **retransmit_base**,
 **retransmit_timeout** and **retransmit_tries**.

|
4
conf/plugins/ntru.opt
Normal file
4
conf/plugins/ntru.opt
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
charon.plugins.ntru.parameter_set = optimum
|
||||||
|
The following parameter sets are available: **x9_98_speed**,
|
||||||
|
**x9_98_bandwidth**, **x9_98_balance** and **optimum**, the last set not
|
||||||
|
being part of the X9.98 standard but having the best performance.
|
143
configure.ac
143
configure.ac
@ -20,7 +20,7 @@
|
|||||||
# initialize & set some vars
|
# initialize & set some vars
|
||||||
# ============================
|
# ============================
|
||||||
|
|
||||||
AC_INIT([strongSwan],[6.0.2])
|
AC_INIT([strongSwan],[5.9.14rc1])
|
||||||
AM_INIT_AUTOMAKE(m4_esyscmd([
|
AM_INIT_AUTOMAKE(m4_esyscmd([
|
||||||
echo tar-ustar
|
echo tar-ustar
|
||||||
echo subdir-objects
|
echo subdir-objects
|
||||||
@ -33,18 +33,21 @@ AM_INIT_AUTOMAKE(m4_esyscmd([
|
|||||||
esac
|
esac
|
||||||
]))
|
]))
|
||||||
m4_ifdef([AM_SILENT_RULES], [AM_SILENT_RULES])
|
m4_ifdef([AM_SILENT_RULES], [AM_SILENT_RULES])
|
||||||
AC_CONFIG_MACRO_DIRS([m4/config m4/macros])
|
AC_CONFIG_MACRO_DIR([m4/config])
|
||||||
AC_CONFIG_HEADERS([config.h])
|
AC_CONFIG_HEADERS([config.h])
|
||||||
AC_DEFINE([CONFIG_H_INCLUDED], [], [defined if config.h included])
|
AC_DEFINE([CONFIG_H_INCLUDED], [], [defined if config.h included])
|
||||||
AC_DISABLE_STATIC
|
AC_DISABLE_STATIC
|
||||||
PKG_PROG_PKG_CONFIG
|
PKG_PROG_PKG_CONFIG
|
||||||
|
|
||||||
|
m4_include(m4/macros/split-package-version.m4)
|
||||||
SPLIT_PACKAGE_VERSION
|
SPLIT_PACKAGE_VERSION
|
||||||
|
|
||||||
# =================================
|
# =================================
|
||||||
# check --enable-xxx & --with-xxx
|
# check --enable-xxx & --with-xxx
|
||||||
# =================================
|
# =================================
|
||||||
|
|
||||||
|
m4_include(m4/macros/with.m4)
|
||||||
|
|
||||||
ARG_WITH_SUBST([random-device], [/dev/random], [set the device to read real random data from])
|
ARG_WITH_SUBST([random-device], [/dev/random], [set the device to read real random data from])
|
||||||
ARG_WITH_SUBST([urandom-device], [/dev/urandom], [set the device to read pseudo random data from])
|
ARG_WITH_SUBST([urandom-device], [/dev/urandom], [set the device to read pseudo random data from])
|
||||||
ARG_WITH_SUBST([strongswan-conf], [${sysconfdir}/strongswan.conf], [set the strongswan.conf file location])
|
ARG_WITH_SUBST([strongswan-conf], [${sysconfdir}/strongswan.conf], [set the strongswan.conf file location])
|
||||||
@ -67,7 +70,7 @@ ARG_WITH_SET([mpz_powm_sec], [yes], [use the more side-channel resistant
|
|||||||
ARG_WITH_SET([dev-headers], [no], [install strongSwan development headers to directory.])
|
ARG_WITH_SET([dev-headers], [no], [install strongSwan development headers to directory.])
|
||||||
ARG_WITH_SET([printf-hooks], [auto], [force the use of a specific printf hook implementation (auto, builtin, glibc, vstr).])
|
ARG_WITH_SET([printf-hooks], [auto], [force the use of a specific printf hook implementation (auto, builtin, glibc, vstr).])
|
||||||
ARG_WITH_SET([rubygemdir], ["gem environment gemdir"], [path to install ruby gems to])
|
ARG_WITH_SET([rubygemdir], ["gem environment gemdir"], [path to install ruby gems to])
|
||||||
ARG_WITH_SET([testable-ke], [yes], [make key exchange implementations testable by providing a set_seed() method])
|
ARG_WITH_SET([pythoneggdir], ["main site-packages directory"], [path to install python eggs to to])
|
||||||
|
|
||||||
if test -n "$PKG_CONFIG"; then
|
if test -n "$PKG_CONFIG"; then
|
||||||
systemdsystemunitdir_default=$($PKG_CONFIG --variable=systemdsystemunitdir systemd)
|
systemdsystemunitdir_default=$($PKG_CONFIG --variable=systemdsystemunitdir systemd)
|
||||||
@ -126,38 +129,42 @@ fi
|
|||||||
# convert script name to uppercase
|
# convert script name to uppercase
|
||||||
AC_SUBST(ipsec_script_upper, [`echo -n "$ipsec_script" | tr a-z A-Z`])
|
AC_SUBST(ipsec_script_upper, [`echo -n "$ipsec_script" | tr a-z A-Z`])
|
||||||
|
|
||||||
|
m4_include(m4/macros/enable-disable.m4)
|
||||||
|
|
||||||
# crypto plugins
|
# crypto plugins
|
||||||
ARG_ENABL_SET([aes], [enable AES software implementation plugin.])
|
ARG_DISBL_SET([aes], [disable AES software implementation plugin.])
|
||||||
ARG_ENABL_SET([af-alg], [enable AF_ALG crypto interface to Linux Crypto API.])
|
ARG_ENABL_SET([af-alg], [enable AF_ALG crypto interface to Linux Crypto API.])
|
||||||
|
ARG_ENABL_SET([bliss], [enable BLISS software implementation plugin.])
|
||||||
ARG_ENABL_SET([blowfish], [enable Blowfish software implementation plugin.])
|
ARG_ENABL_SET([blowfish], [enable Blowfish software implementation plugin.])
|
||||||
ARG_ENABL_SET([botan], [enables the Botan crypto plugin.])
|
ARG_ENABL_SET([botan], [enables the Botan crypto plugin.])
|
||||||
ARG_ENABL_SET([ccm], [enables the CCM AEAD wrapper crypto plugin.])
|
ARG_ENABL_SET([ccm], [enables the CCM AEAD wrapper crypto plugin.])
|
||||||
ARG_ENABL_SET([chapoly], [enables the ChaCha20/Poly1305 AEAD plugin.])
|
ARG_ENABL_SET([chapoly], [enables the ChaCha20/Poly1305 AEAD plugin.])
|
||||||
ARG_DISBL_SET([cmac], [disable CMAC crypto implementation plugin.])
|
ARG_DISBL_SET([cmac], [disable CMAC crypto implementation plugin.])
|
||||||
ARG_ENABL_SET([ctr], [enables the Counter Mode wrapper crypto plugin.])
|
ARG_ENABL_SET([ctr], [enables the Counter Mode wrapper crypto plugin.])
|
||||||
ARG_ENABL_SET([des], [enable DES/3DES software implementation plugin.])
|
ARG_DISBL_SET([des], [disable DES/3DES software implementation plugin.])
|
||||||
ARG_DISBL_SET([drbg], [disable the NIST Deterministic Random Bit Generator plugin.])
|
ARG_DISBL_SET([drbg], [disable the NIST Deterministic Random Bit Generator plugin.])
|
||||||
ARG_ENABL_SET([fips-prf], [enable FIPS PRF software implementation plugin.])
|
ARG_DISBL_SET([fips-prf], [disable FIPS PRF software implementation plugin.])
|
||||||
ARG_ENABL_SET([gcm], [enable the GCM AEAD wrapper crypto plugin.])
|
ARG_DISBL_SET([gcm], [disable the GCM AEAD wrapper crypto plugin.])
|
||||||
ARG_ENABL_SET([gcrypt], [enables the libgcrypt plugin.])
|
ARG_ENABL_SET([gcrypt], [enables the libgcrypt plugin.])
|
||||||
ARG_ENABL_SET([gmp], [enable GNU MP (libgmp) based crypto implementation plugin.])
|
ARG_DISBL_SET([gmp], [disable GNU MP (libgmp) based crypto implementation plugin.])
|
||||||
ARG_ENABL_SET([curve25519], [enable Curve25519 Diffie-Hellman plugin.])
|
ARG_DISBL_SET([curve25519], [disable Curve25519 Diffie-Hellman plugin.])
|
||||||
ARG_ENABL_SET([hmac], [enable HMAC crypto implementation plugin.])
|
ARG_DISBL_SET([hmac], [disable HMAC crypto implementation plugin.])
|
||||||
ARG_DISBL_SET([kdf], [disable KDF (prf+) implementation plugin.])
|
ARG_DISBL_SET([kdf], [disable KDF (prf+) implementation plugin.])
|
||||||
ARG_ENABL_SET([md4], [enable MD4 software implementation plugin.])
|
ARG_ENABL_SET([md4], [enable MD4 software implementation plugin.])
|
||||||
ARG_ENABL_SET([md5], [enable MD5 software implementation plugin.])
|
ARG_DISBL_SET([md5], [disable MD5 software implementation plugin.])
|
||||||
ARG_ENABL_SET([mgf1], [enable the MGF1 software implementation plugin.])
|
ARG_ENABL_SET([mgf1], [enable the MGF1 software implementation plugin.])
|
||||||
ARG_ENABL_SET([ml], [enable Module-Lattice-based crypto (ML-KEM) plugin.])
|
ARG_ENABL_SET([newhope], [enable New Hope crypto plugin.])
|
||||||
ARG_DISBL_SET([nonce], [disable nonce generation plugin.])
|
ARG_DISBL_SET([nonce], [disable nonce generation plugin.])
|
||||||
ARG_DISBL_SET([openssl], [disable the OpenSSL crypto plugin.])
|
ARG_ENABL_SET([ntru], [enables the NTRU crypto plugin.])
|
||||||
|
ARG_ENABL_SET([openssl], [enables the OpenSSL crypto plugin.])
|
||||||
ARG_ENABL_SET([wolfssl], [enables the wolfSSL crypto plugin.])
|
ARG_ENABL_SET([wolfssl], [enables the wolfSSL crypto plugin.])
|
||||||
ARG_ENABL_SET([padlock], [enables VIA Padlock crypto plugin.])
|
ARG_ENABL_SET([padlock], [enables VIA Padlock crypto plugin.])
|
||||||
ARG_DISBL_SET([random], [disable RNG implementation on top of /dev/(u)random.])
|
ARG_DISBL_SET([random], [disable RNG implementation on top of /dev/(u)random.])
|
||||||
ARG_ENABL_SET([rc2], [enable RC2 software implementation plugin.])
|
ARG_DISBL_SET([rc2], [disable RC2 software implementation plugin.])
|
||||||
ARG_ENABL_SET([rdrand], [enable Intel RDRAND random generator plugin.])
|
ARG_ENABL_SET([rdrand], [enable Intel RDRAND random generator plugin.])
|
||||||
ARG_ENABL_SET([aesni], [enable Intel AES-NI crypto plugin.])
|
ARG_ENABL_SET([aesni], [enable Intel AES-NI crypto plugin.])
|
||||||
ARG_ENABL_SET([sha1], [enable SHA1 software implementation plugin.])
|
ARG_DISBL_SET([sha1], [disable SHA1 software implementation plugin.])
|
||||||
ARG_ENABL_SET([sha2], [enable SHA256/SHA384/SHA512 software implementation plugin.])
|
ARG_DISBL_SET([sha2], [disable SHA256/SHA384/SHA512 software implementation plugin.])
|
||||||
ARG_ENABL_SET([sha3], [enable SHA3_224/SHA3_256/SHA3_384/SHA3_512 software implementation plugin.])
|
ARG_ENABL_SET([sha3], [enable SHA3_224/SHA3_256/SHA3_384/SHA3_512 software implementation plugin.])
|
||||||
ARG_DISBL_SET([xcbc], [disable xcbc crypto implementation plugin.])
|
ARG_DISBL_SET([xcbc], [disable xcbc crypto implementation plugin.])
|
||||||
# encoding/decoding plugins
|
# encoding/decoding plugins
|
||||||
@ -167,7 +174,7 @@ ARG_DISBL_SET([pgp], [disable PGP key decoding plugin.])
|
|||||||
ARG_DISBL_SET([pkcs1], [disable PKCS1 key decoding plugin.])
|
ARG_DISBL_SET([pkcs1], [disable PKCS1 key decoding plugin.])
|
||||||
ARG_DISBL_SET([pkcs7], [disable PKCS7 container support plugin.])
|
ARG_DISBL_SET([pkcs7], [disable PKCS7 container support plugin.])
|
||||||
ARG_DISBL_SET([pkcs8], [disable PKCS8 private key decoding plugin.])
|
ARG_DISBL_SET([pkcs8], [disable PKCS8 private key decoding plugin.])
|
||||||
ARG_ENABL_SET([pkcs12], [enable PKCS12 container support plugin.])
|
ARG_DISBL_SET([pkcs12], [disable PKCS12 container support plugin.])
|
||||||
ARG_DISBL_SET([pubkey], [disable RAW public key support plugin.])
|
ARG_DISBL_SET([pubkey], [disable RAW public key support plugin.])
|
||||||
ARG_DISBL_SET([sshkey], [disable SSH key decoding plugin.])
|
ARG_DISBL_SET([sshkey], [disable SSH key decoding plugin.])
|
||||||
ARG_DISBL_SET([x509], [disable X509 certificate implementation plugin.])
|
ARG_DISBL_SET([x509], [disable X509 certificate implementation plugin.])
|
||||||
@ -230,9 +237,10 @@ ARG_DISBL_SET([socket-default], [disable default socket implementation for charo
|
|||||||
ARG_ENABL_SET([socket-dynamic], [enable dynamic socket implementation for charon])
|
ARG_ENABL_SET([socket-dynamic], [enable dynamic socket implementation for charon])
|
||||||
ARG_ENABL_SET([socket-win], [enable Winsock2 based socket implementation for charon])
|
ARG_ENABL_SET([socket-win], [enable Winsock2 based socket implementation for charon])
|
||||||
# configuration/control plugins
|
# configuration/control plugins
|
||||||
ARG_ENABL_SET([stroke], [enable the stroke configuration backend.])
|
ARG_DISBL_SET([stroke], [disable charons stroke configuration backend.])
|
||||||
ARG_ENABL_SET([smp], [enable SMP configuration and control interface. Requires libxml.])
|
ARG_ENABL_SET([smp], [enable SMP configuration and control interface. Requires libxml.])
|
||||||
ARG_ENABL_SET([sql], [enable SQL database configuration backend.])
|
ARG_ENABL_SET([sql], [enable SQL database configuration backend.])
|
||||||
|
ARG_ENABL_SET([uci], [enable OpenWRT UCI configuration plugin.])
|
||||||
ARG_DISBL_SET([vici], [disable strongSwan IKE generic IPC interface plugin.])
|
ARG_DISBL_SET([vici], [disable strongSwan IKE generic IPC interface plugin.])
|
||||||
# attribute provider/consumer plugins
|
# attribute provider/consumer plugins
|
||||||
ARG_ENABL_SET([android-dns], [enable Android specific DNS handler.])
|
ARG_ENABL_SET([android-dns], [enable Android specific DNS handler.])
|
||||||
@ -312,8 +320,8 @@ ARG_ENABL_SET([mediation], [enable IKEv2 Mediation Extension.])
|
|||||||
ARG_ENABL_SET([unwind-backtraces],[use libunwind to create backtraces for memory leaks and segfaults.])
|
ARG_ENABL_SET([unwind-backtraces],[use libunwind to create backtraces for memory leaks and segfaults.])
|
||||||
ARG_ENABL_SET([ruby-gems], [enable build of provided ruby gems.])
|
ARG_ENABL_SET([ruby-gems], [enable build of provided ruby gems.])
|
||||||
ARG_ENABL_SET([ruby-gems-install],[enable installation of provided ruby gems.])
|
ARG_ENABL_SET([ruby-gems-install],[enable installation of provided ruby gems.])
|
||||||
ARG_ENABL_SET([python-wheels], [enable build of provided python wheels.])
|
ARG_ENABL_SET([python-eggs], [enable build of provided python eggs.])
|
||||||
ARG_ENABL_SET([python-eggs], [legacy alias for --enable-python-wheels.])
|
ARG_ENABL_SET([python-eggs-install],[enable installation of provided python eggs.])
|
||||||
ARG_ENABL_SET([perl-cpan], [enable build of provided perl CPAN module.])
|
ARG_ENABL_SET([perl-cpan], [enable build of provided perl CPAN module.])
|
||||||
ARG_ENABL_SET([perl-cpan-install],[enable installation of provided CPAN module.])
|
ARG_ENABL_SET([perl-cpan-install],[enable installation of provided CPAN module.])
|
||||||
ARG_ENABL_SET([selinux], [enable SELinux support for labeled IPsec.])
|
ARG_ENABL_SET([selinux], [enable SELinux support for labeled IPsec.])
|
||||||
@ -462,10 +470,6 @@ if test x$fips_prf = xtrue; then
|
|||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if test x$pkcs12 = xtrue; then
|
|
||||||
rc2=true;
|
|
||||||
fi
|
|
||||||
|
|
||||||
if test x$swanctl = xtrue; then
|
if test x$swanctl = xtrue; then
|
||||||
vici=true
|
vici=true
|
||||||
fi
|
fi
|
||||||
@ -491,8 +495,8 @@ if test x$ruby_gems_install = xtrue; then
|
|||||||
ruby_gems=true
|
ruby_gems=true
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if test x$python_eggs = xtrue; then
|
if test x$python_eggs_install = xtrue; then
|
||||||
python_wheels=true
|
python_eggs=true
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if test x$perl_cpan_install = xtrue; then
|
if test x$perl_cpan_install = xtrue; then
|
||||||
@ -507,11 +511,11 @@ if test x$tpm = xtrue; then
|
|||||||
tss_tss2=true
|
tss_tss2=true
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if test x$gmp = xtrue; then
|
if test x$gmp = xtrue -o x$ntru = xtrue -o x$bliss = xtrue; then
|
||||||
mgf1=true
|
mgf1=true
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if test x$stroke = xtrue -o x$vici = xtrue; then
|
if test x$stroke = xtrue; then
|
||||||
counters=true
|
counters=true
|
||||||
fi
|
fi
|
||||||
|
|
||||||
@ -520,28 +524,15 @@ if test x$cert_enroll = xtrue; then
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
if test x$kdf = xfalse; then
|
if test x$kdf = xfalse; then
|
||||||
openssl_hkdf=false
|
|
||||||
if test x$openssl = xtrue; then
|
|
||||||
AC_MSG_CHECKING(for OpenSSL >= 3.0 for HKDF)
|
|
||||||
AC_COMPILE_IFELSE(
|
|
||||||
[AC_LANG_PROGRAM(
|
|
||||||
[[#include <openssl/opensslv.h>]],
|
|
||||||
[[#if OPENSSL_VERSION_NUMBER < 0x30000000L && !defined(OPENSSL_IS_AWSLC)
|
|
||||||
#error OpenSSL version unusable
|
|
||||||
#endif]])],
|
|
||||||
[AC_MSG_RESULT([yes]); openssl_hkdf=true],
|
|
||||||
[AC_MSG_RESULT([no])]
|
|
||||||
)
|
|
||||||
fi
|
|
||||||
if test x$aesni = xtrue -o x$cmac = xtrue -o x$xcbc = xtrue; then
|
if test x$aesni = xtrue -o x$cmac = xtrue -o x$xcbc = xtrue; then
|
||||||
AC_MSG_WARN(m4_normalize([
|
AC_MSG_WARN(m4_normalize([
|
||||||
kdf plugin is required for possible use of PRF_AES128_XCBC/CMAC
|
kdf plugin is required for possible use of PRF_AES128_XCBC/CMAC
|
||||||
by one of these plugins: aesni, cmac, xcbc]))
|
by one of these plugins: aesni, cmac, xcbc]))
|
||||||
kdf=true
|
kdf=true
|
||||||
elif test x$botan = xfalse -a x$openssl_hkdf = xfalse -a x$wolfssl = xfalse; then
|
elif test x$botan = xfalse -a x$openssl = xfalse -a x$wolfssl = xfalse; then
|
||||||
AC_MSG_WARN(m4_normalize([
|
AC_MSG_WARN(m4_normalize([
|
||||||
kdf plugin is required because none of the following plugins is
|
kdf plugin is required because none of the following plugins is
|
||||||
enabled or usable: botan, openssl, wolfssl]))
|
enabled: botan, openssl, wolfssl]))
|
||||||
kdf=true
|
kdf=true
|
||||||
fi
|
fi
|
||||||
fi
|
fi
|
||||||
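
The block carried on the "-" side of the hunk above compiles a tiny test program to decide whether the OpenSSL in use can provide HKDF (the openssl_hkdf flag). Purely as an illustration, and not part of configure.ac or of this comparison, the same guard can be written as a stand-alone C translation unit; 0x30000000L is OpenSSL's encoding of version 3.0.0:

    /* Sketch of the compile probe driven by AC_COMPILE_IFELSE above. */
    #include <openssl/opensslv.h>

    #if OPENSSL_VERSION_NUMBER < 0x30000000L && !defined(OPENSSL_IS_AWSLC)
    #error OpenSSL version unusable
    #endif

    int main(void)
    {
        return 0; /* compiles only against OpenSSL 3.0+ or AWS-LC */
    }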
@@ -604,10 +595,6 @@ AC_LINK_IFELSE(
 AC_SUBST(ATOMICLIB)

 LIBS=$saved_LIBS
-
-# Some platforms require explicit linking to use POSIX regular expressions
-AC_SEARCH_LIBS([regcomp], [regex], [AC_DEFINE([HAVE_REGEX], [], [have regcomp() etc.])])
-
 # ------------------------------------------------------

 AC_MSG_CHECKING(for dladdr)

@@ -726,11 +713,6 @@ AC_CHECK_HEADERS([netinet/ip6.h linux/fib_rules.h], [], [],
 #include <sys/types.h>
 #include <netinet/in.h>
 ])
-AC_CHECK_HEADERS([linux/vm_sockets.h], [have_vm_sockets=true], [],
-[
-#include <sys/socket.h>
-])
-AM_CONDITIONAL(USE_VM_SOCKETS, [test "x$have_vm_sockets" = xtrue])

 AC_CHECK_MEMBERS([struct sockaddr.sa_len], [], [],
 [
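
The "-" side of the hunk above probes for <linux/vm_sockets.h> and passes <sys/socket.h> as a prerequisite include. A rough stand-alone equivalent of that probe (illustrative only, not part of the build) looks like this; the prerequisite matters because <linux/vm_sockets.h> relies on types such as sa_family_t declared by <sys/socket.h>:

    /* Hypothetical check mirroring the AC_CHECK_HEADERS test above. */
    #include <sys/socket.h>
    #include <linux/vm_sockets.h>

    int main(void)
    {
        struct sockaddr_vm addr = { .svm_family = AF_VSOCK };
        return addr.svm_family == AF_VSOCK ? 0 : 1;
    }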
@@ -1044,7 +1026,7 @@ if test x$unbound = xtrue; then
 fi

 if test x$soup = xtrue; then
-    PKG_CHECK_MODULES(soup, [libsoup-3.0])
+    PKG_CHECK_MODULES(soup, [libsoup-2.4])
     AC_SUBST(soup_CFLAGS)
     AC_SUBST(soup_LIBS)
 fi

@@ -1238,6 +1220,11 @@ if test x$botan = xtrue; then
     LIBS=$saved_LIBS
 fi

+if test x$uci = xtrue; then
+    AC_CHECK_LIB([uci],[uci_alloc_context],[LIBS="$LIBS"],[AC_MSG_ERROR([UCI library libuci not found])],[])
+    AC_CHECK_HEADER([uci.h],,[AC_MSG_ERROR([UCI header uci.h not found!])])
+fi
+
 if test x$android_dns = xtrue; then
     AC_CHECK_LIB([cutils],[property_get],[LIBS="$LIBS"],[AC_MSG_ERROR([Android library libcutils not found])],[])
     AC_CHECK_HEADER([cutils/properties.h],,[AC_MSG_ERROR([Android header cutils/properties.h not found!])])

@@ -1345,10 +1332,6 @@ if test x$unwind_backtraces = xtrue; then
     AC_SUBST(UNWINDLIB)
 fi

-if test "x$testable_ke" = xyes; then
-    AC_DEFINE([TESTABLE_KE], [1], [Define to 1 if key exchange methods should be testable.])
-fi
-
 AM_CONDITIONAL(USE_DEV_HEADERS, [test "x$dev_headers" != xno])
 if test x$dev_headers = xyes; then
     dev_headers="$includedir/strongswan"

@@ -1372,7 +1355,7 @@ if test x$coverage = xtrue; then
         AC_MSG_ERROR([genhtml not found])
     fi

-    COVERAGE_CFLAGS="-fprofile-arcs -ftest-coverage -fprofile-update=atomic"
+    COVERAGE_CFLAGS="-fprofile-arcs -ftest-coverage"
     COVERAGE_LDFLAGS="-fprofile-arcs"
     AC_SUBST(COVERAGE_CFLAGS)
     AC_SUBST(COVERAGE_LDFLAGS)

@@ -1438,12 +1421,24 @@ if test x$ruby_gems = xtrue; then
 fi
 AM_CONDITIONAL(RUBY_GEMS_INSTALL, [test "x$ruby_gems_install" = xtrue])

-if test x$python_wheels = xtrue; then
+if test x$python_eggs = xtrue; then
     PYTHON_PACKAGE_VERSION=`echo "$PACKAGE_VERSION" | $SED 's/dr/.dev/'`
     AC_SUBST([PYTHON_PACKAGE_VERSION])
+    if test x$python_eggs_install = xtrue; then
+        AC_PATH_PROG([EASY_INSTALL], [easy_install], [], [$PATH:/bin:/usr/bin:/usr/local/bin])
+        if test x$EASY_INSTALL = x; then
+            AC_MSG_ERROR(Python easy_install not found)
+        fi
+    fi
+    if test "x$pythoneggdir" = "xmain site-packages directory"; then
+        AC_SUBST(PYTHONEGGINSTALLDIR, "")
+    else
+        AC_SUBST(PYTHONEGGINSTALLDIR, "--install-dir $pythoneggdir")
+    fi
     AC_PATH_PROG([TOX], [tox], [], [$PATH:/bin:/usr/bin:/usr/local/bin])
     AC_PATH_PROG([PY_TEST], [py.test], [], [$PATH:/bin:/usr/bin:/usr/local/bin])
 fi
+AM_CONDITIONAL(PYTHON_EGGS_INSTALL, [test "x$python_eggs_install" = xtrue])

 AM_CONDITIONAL(PERL_CPAN_INSTALL, [test "x$perl_cpan_install" = xtrue])

@@ -1515,6 +1510,8 @@ CFLAGS="$WARN_CFLAGS $CFLAGS"
 # collect plugin list for strongSwan components
 # ===============================================

+m4_include(m4/macros/add-plugin.m4)
+
 # plugin lists for all components
 charon_plugins=
 pool_plugins=

@@ -1588,8 +1585,10 @@ ADD_PLUGIN([kdf], [s charon pki scripts nm cmd])
 ADD_PLUGIN([ctr], [s charon scripts nm cmd])
 ADD_PLUGIN([ccm], [s charon scripts nm cmd])
 ADD_PLUGIN([gcm], [s charon scripts nm cmd])
-ADD_PLUGIN([ml], [s charon scripts nm cmd])
+ADD_PLUGIN([ntru], [s charon scripts nm cmd])
 ADD_PLUGIN([drbg], [s charon pki scripts nm cmd])
+ADD_PLUGIN([newhope], [s charon scripts nm cmd])
+ADD_PLUGIN([bliss], [s charon pki scripts nm cmd])
 ADD_PLUGIN([curl], [s charon pki scripts nm cmd])
 ADD_PLUGIN([files], [s charon pki scripts nm cmd])
 ADD_PLUGIN([winhttp], [s charon pki scripts])

@@ -1670,6 +1669,7 @@ ADD_PLUGIN([led], [c charon])
 ADD_PLUGIN([duplicheck], [c charon])
 ADD_PLUGIN([coupling], [c charon])
 ADD_PLUGIN([radattr], [c charon])
+ADD_PLUGIN([uci], [c charon])
 ADD_PLUGIN([addrblock], [c charon])
 ADD_PLUGIN([unity], [c charon])
 ADD_PLUGIN([counters], [c charon])

@@ -1755,8 +1755,10 @@ AM_CONDITIONAL(USE_CTR, test x$ctr = xtrue)
 AM_CONDITIONAL(USE_CCM, test x$ccm = xtrue)
 AM_CONDITIONAL(USE_GCM, test x$gcm = xtrue)
 AM_CONDITIONAL(USE_AF_ALG, test x$af_alg = xtrue)
+AM_CONDITIONAL(USE_NTRU, test x$ntru = xtrue)
+AM_CONDITIONAL(USE_NEWHOPE, test x$newhope = xtrue)
+AM_CONDITIONAL(USE_BLISS, test x$bliss = xtrue)
 AM_CONDITIONAL(USE_DRBG, test x$drbg = xtrue)
-AM_CONDITIONAL(USE_ML, test x$ml = xtrue)

 # charon plugins
 # ----------------

@@ -1764,6 +1766,7 @@ AM_CONDITIONAL(USE_STROKE, test x$stroke = xtrue)
 AM_CONDITIONAL(USE_VICI, test x$vici = xtrue)
 AM_CONDITIONAL(USE_MEDSRV, test x$medsrv = xtrue)
 AM_CONDITIONAL(USE_MEDCLI, test x$medcli = xtrue)
+AM_CONDITIONAL(USE_UCI, test x$uci = xtrue)
 AM_CONDITIONAL(USE_OSX_ATTR, test x$osx_attr = xtrue)
 AM_CONDITIONAL(USE_P_CSCF, test x$p_cscf = xtrue)
 AM_CONDITIONAL(USE_ANDROID_DNS, test x$android_dns = xtrue)

@@ -1874,6 +1877,7 @@ AM_CONDITIONAL(USE_CONFTEST, test x$conftest = xtrue)
 AM_CONDITIONAL(USE_LIBSTRONGSWAN, test x$charon = xtrue -o x$pki = xtrue -o x$conftest = xtrue -o x$fast = xtrue -o x$imcv = xtrue -o x$nm = xtrue -o x$tkm = xtrue -o x$cmd = xtrue -o x$tls = xtrue -o x$tnc_tnccs = xtrue -o x$aikgen = xtrue -o x$svc = xtrue -o x$systemd = xtrue)
 AM_CONDITIONAL(USE_LIBCHARON, test x$charon = xtrue -o x$conftest = xtrue -o x$nm = xtrue -o x$tkm = xtrue -o x$cmd = xtrue -o x$svc = xtrue -o x$systemd = xtrue)
 AM_CONDITIONAL(USE_LIBIPSEC, test x$libipsec = xtrue)
+AM_CONDITIONAL(USE_LIBNTTFFT, test x$bliss = xtrue -o x$newhope = xtrue)
 AM_CONDITIONAL(USE_LIBTNCIF, test x$tnc_tnccs = xtrue -o x$imcv = xtrue)
 AM_CONDITIONAL(USE_LIBTNCCS, test x$tnc_tnccs = xtrue)
 AM_CONDITIONAL(USE_LIBPTTLS, test x$tnc_tnccs = xtrue)

@@ -1904,7 +1908,7 @@ AM_CONDITIONAL(USE_LEGACY_SYSTEMD, test -n "$systemdsystemunitdir" -a "x$systemd
 AM_CONDITIONAL(USE_CERT_ENROLL, test x$cert_enroll = xtrue)
 AM_CONDITIONAL(USE_CERT_ENROLL_TIMER, test x$cert_enroll_timer = xtrue)
 AM_CONDITIONAL(USE_RUBY_GEMS, test x$ruby_gems = xtrue)
-AM_CONDITIONAL(USE_PYTHON_WHEELS, test x$python_wheels = xtrue)
+AM_CONDITIONAL(USE_PYTHON_EGGS, test x$python_eggs = xtrue)
 AM_CONDITIONAL(USE_PERL_CPAN, test x$perl_cpan = xtrue)
 AM_CONDITIONAL(USE_TOX, test "x$TOX" != x)
 AM_CONDITIONAL(USE_PY_TEST, test "x$PY_TEST" != x -a "x$TOX" = x)

@@ -1949,16 +1953,14 @@ strongswan_options=

 AM_COND_IF([USE_AIKGEN], [strongswan_options=${strongswan_options}" aikgen"])
 AM_COND_IF([USE_ATTR_SQL], [strongswan_options=${strongswan_options}" pool"])
-AM_COND_IF([USE_CHARON], [strongswan_options=${strongswan_options}" charon charon-logging iptfs"])
+AM_COND_IF([USE_CHARON], [strongswan_options=${strongswan_options}" charon charon-logging"])
 AM_COND_IF([USE_FILE_CONFIG], [strongswan_options=${strongswan_options}" starter"])
 AM_COND_IF([USE_IMV_ATTESTATION], [strongswan_options=${strongswan_options}" attest"])
-AM_COND_IF([USE_IMCV], [strongswan_options=${strongswan_options}" imcv imv_policy_manager"])
+AM_COND_IF([USE_IMCV], [strongswan_options=${strongswan_options}" imcv"])
-AM_COND_IF([USE_IMC_SWIMA], [strongswan_options=${strongswan_options}" sw-collector"])
 AM_COND_IF([USE_IMV_SWIMA], [strongswan_options=${strongswan_options}" sec-updater"])
 AM_COND_IF([USE_LIBTNCCS], [strongswan_options=${strongswan_options}" tnc"])
 AM_COND_IF([USE_MANAGER], [strongswan_options=${strongswan_options}" manager"])
 AM_COND_IF([USE_MEDSRV], [strongswan_options=${strongswan_options}" medsrv"])
-AM_COND_IF([USE_NM], [strongswan_options=${strongswan_options}" charon-nm"])
 AM_COND_IF([USE_PKI], [strongswan_options=${strongswan_options}" pki"])
 AM_COND_IF([USE_SWANCTL], [strongswan_options=${strongswan_options}" swanctl"])
 AM_COND_IF([USE_SYSTEMD], [strongswan_options=${strongswan_options}" charon-systemd"])

@@ -1980,6 +1982,8 @@ AC_CONFIG_FILES([
     src/Makefile
     src/include/Makefile
     src/libstrongswan/Makefile
+    src/libstrongswan/math/libnttfft/Makefile
+    src/libstrongswan/math/libnttfft/tests/Makefile
     src/libstrongswan/plugins/aes/Makefile
     src/libstrongswan/plugins/cmac/Makefile
     src/libstrongswan/plugins/des/Makefile

@@ -2037,7 +2041,11 @@ AC_CONFIG_FILES([
     src/libstrongswan/plugins/gcm/Makefile
     src/libstrongswan/plugins/af_alg/Makefile
     src/libstrongswan/plugins/drbg/Makefile
-    src/libstrongswan/plugins/ml/Makefile
+    src/libstrongswan/plugins/ntru/Makefile
+    src/libstrongswan/plugins/bliss/Makefile
+    src/libstrongswan/plugins/bliss/tests/Makefile
+    src/libstrongswan/plugins/newhope/Makefile
+    src/libstrongswan/plugins/newhope/tests/Makefile
     src/libstrongswan/plugins/test_vectors/Makefile
     src/libstrongswan/tests/Makefile
     src/libipsec/Makefile

@@ -2118,6 +2126,7 @@ AC_CONFIG_FILES([
     src/libcharon/plugins/medcli/Makefile
     src/libcharon/plugins/addrblock/Makefile
     src/libcharon/plugins/unity/Makefile
+    src/libcharon/plugins/uci/Makefile
     src/libcharon/plugins/ha/Makefile
     src/libcharon/plugins/kernel_netlink/Makefile
     src/libcharon/plugins/kernel_pfkey/Makefile
@@ -11,7 +11,7 @@ AM_CPPFLAGS = @CPPFLAGS@ \

 fuzz_ldflags = ${libfuzzer} \
     $(top_builddir)/src/libstrongswan/.libs/libstrongswan.a \
-    -Wl,-Bstatic -lcrypto -Wl,-Bdynamic \
+    -Wl,-Bstatic -lgmp -Wl,-Bdynamic \
     @FUZZING_LDFLAGS@

 pa_tnc_ldflags = \

@@ -2,12 +2,10 @@
 SUBDIRS =

 if USE_LEGACY_SYSTEMD
-if USE_FILE_CONFIG
 if USE_CHARON
 SUBDIRS += systemd-starter
 endif
 endif
-endif

 if USE_SYSTEMD
 if USE_SWANCTL

@@ -1,7 +1,6 @@
 [Unit]
 Description=strongSwan IPsec IKEv1/IKEv2 daemon using ipsec.conf
 After=syslog.target network-online.target
-Wants=syslog.target network-online.target

 [Service]
 ExecStart=@SBINDIR@/@IPSEC_SCRIPT@ start --nofork

@@ -1,7 +1,6 @@
 [Unit]
 Description=strongSwan IPsec IKEv1/IKEv2 daemon using swanctl
 After=network-online.target
-Wants=network-online.target

 [Service]
 Type=notify
@@ -1,532 +0,0 @@
-# host-cpu-c-abi.m4
-# serial 20
-dnl Copyright (C) 2002-2025 Free Software Foundation, Inc.
-dnl This file is free software; the Free Software Foundation
-dnl gives unlimited permission to copy and/or distribute it,
-dnl with or without modifications, as long as this notice is preserved.
-dnl This file is offered as-is, without any warranty.
-
-dnl From Bruno Haible and Sam Steingold.
-
-dnl Sets the HOST_CPU variable to the canonical name of the CPU.
-dnl Sets the HOST_CPU_C_ABI variable to the canonical name of the CPU with its
-dnl C language ABI (application binary interface).
-dnl Also defines __${HOST_CPU}__ and __${HOST_CPU_C_ABI}__ as C macros in
-dnl config.h.
-dnl
-dnl This canonical name can be used to select a particular assembly language
-dnl source file that will interoperate with C code on the given host.
-dnl
-dnl For example:
-dnl * 'i386' and 'sparc' are different canonical names, because code for i386
-dnl   will not run on SPARC CPUs and vice versa. They have different
-dnl   instruction sets.
-dnl * 'sparc' and 'sparc64' are different canonical names, because code for
-dnl   'sparc' and code for 'sparc64' cannot be linked together: 'sparc' code
-dnl   contains 32-bit instructions, whereas 'sparc64' code contains 64-bit
-dnl   instructions. A process on a SPARC CPU can be in 32-bit mode or in 64-bit
-dnl   mode, but not both.
-dnl * 'mips' and 'mipsn32' are different canonical names, because they use
-dnl   different argument passing and return conventions for C functions, and
-dnl   although the instruction set of 'mips' is a large subset of the
-dnl   instruction set of 'mipsn32'.
-dnl * 'mipsn32' and 'mips64' are different canonical names, because they use
-dnl   different sizes for the C types like 'int' and 'void *', and although
-dnl   the instruction sets of 'mipsn32' and 'mips64' are the same.
-dnl * The same canonical name is used for different endiannesses. You can
-dnl   determine the endianness through preprocessor symbols:
-dnl   - 'arm': test __ARMEL__.
-dnl   - 'mips', 'mipsn32', 'mips64': test _MIPSEB vs. _MIPSEL.
-dnl   - 'powerpc64': test __BIG_ENDIAN__ vs. __LITTLE_ENDIAN__.
-dnl * The same name 'i386' is used for CPUs of type i386, i486, i586
-dnl   (Pentium), AMD K7, Pentium II, Pentium IV, etc., because
-dnl   - Instructions that do not exist on all of these CPUs (cmpxchg,
-dnl     MMX, SSE, SSE2, 3DNow! etc.) are not frequently used. If your
-dnl     assembly language source files use such instructions, you will
-dnl     need to make the distinction.
-dnl   - Speed of execution of the common instruction set is reasonable across
-dnl     the entire family of CPUs. If you have assembly language source files
-dnl     that are optimized for particular CPU types (like GNU gmp has), you
-dnl     will need to make the distinction.
-dnl     See <https://en.wikipedia.org/wiki/X86_instruction_listings>.
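
The dnl comments above explain why the canonical CPU/ABI names differ. Purely as an illustration (not part of this macro file), the distinctions for the x86 family can be read off standard compiler-defined macros, which is what the AC_COMPILE_IFELSE probes further down in this file rely on:

    /* Prints which x86 C ABI the compiler targets (assumed GCC/Clang/MSVC macros). */
    #include <stdio.h>

    int main(void)
    {
    #if defined __x86_64__ || defined _M_X64
    # if defined __ILP32__ || defined _ILP32
        puts("x86_64-x32");   /* 64-bit instructions, 32-bit pointers */
    # else
        puts("x86_64");       /* 64-bit instructions and pointers */
    # endif
    #elif defined __i386__ || defined _M_IX86
        puts("i386");
    #else
        puts("other");
    #endif
        return 0;
    }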
AC_DEFUN([gl_HOST_CPU_C_ABI],
|
|
||||||
[
|
|
||||||
AC_REQUIRE([AC_CANONICAL_HOST])
|
|
||||||
AC_REQUIRE([gl_C_ASM])
|
|
||||||
AC_CACHE_CHECK([host CPU and C ABI], [gl_cv_host_cpu_c_abi],
|
|
||||||
[case "$host_cpu" in
|
|
||||||
|
|
||||||
changequote(,)dnl
|
|
||||||
i[34567]86 )
|
|
||||||
changequote([,])dnl
|
|
||||||
gl_cv_host_cpu_c_abi=i386
|
|
||||||
;;
|
|
||||||
|
|
||||||
x86_64 )
|
|
||||||
# On x86_64 systems, the C compiler may be generating code in one of
|
|
||||||
# these ABIs:
|
|
||||||
# - 64-bit instruction set, 64-bit pointers, 64-bit 'long': x86_64.
|
|
||||||
# - 64-bit instruction set, 64-bit pointers, 32-bit 'long': x86_64
|
|
||||||
# with native Windows (mingw, MSVC).
|
|
||||||
# - 64-bit instruction set, 32-bit pointers, 32-bit 'long': x86_64-x32.
|
|
||||||
# - 32-bit instruction set, 32-bit pointers, 32-bit 'long': i386.
|
|
||||||
AC_COMPILE_IFELSE(
|
|
||||||
[AC_LANG_SOURCE(
|
|
||||||
[[#if (defined __x86_64__ || defined __amd64__ \
|
|
||||||
|| defined _M_X64 || defined _M_AMD64)
|
|
||||||
int ok;
|
|
||||||
#else
|
|
||||||
error fail
|
|
||||||
#endif
|
|
||||||
]])],
|
|
||||||
[AC_COMPILE_IFELSE(
|
|
||||||
[AC_LANG_SOURCE(
|
|
||||||
[[#if defined __ILP32__ || defined _ILP32
|
|
||||||
int ok;
|
|
||||||
#else
|
|
||||||
error fail
|
|
||||||
#endif
|
|
||||||
]])],
|
|
||||||
[gl_cv_host_cpu_c_abi=x86_64-x32],
|
|
||||||
[gl_cv_host_cpu_c_abi=x86_64])],
|
|
||||||
[gl_cv_host_cpu_c_abi=i386])
|
|
||||||
;;
|
|
||||||
|
|
||||||
changequote(,)dnl
|
|
||||||
alphaev[4-8] | alphaev56 | alphapca5[67] | alphaev6[78] )
|
|
||||||
changequote([,])dnl
|
|
||||||
gl_cv_host_cpu_c_abi=alpha
|
|
||||||
;;
|
|
||||||
|
|
||||||
arm* | aarch64 )
|
|
||||||
# Assume arm with EABI.
|
|
||||||
# On arm64 systems, the C compiler may be generating code in one of
|
|
||||||
# these ABIs:
|
|
||||||
# - aarch64 instruction set, 64-bit pointers, 64-bit 'long': arm64.
|
|
||||||
# - aarch64 instruction set, 32-bit pointers, 32-bit 'long': arm64-ilp32.
|
|
||||||
# - 32-bit instruction set, 32-bit pointers, 32-bit 'long': arm or armhf.
|
|
||||||
AC_COMPILE_IFELSE(
|
|
||||||
[AC_LANG_SOURCE(
|
|
||||||
[[#ifdef __aarch64__
|
|
||||||
int ok;
|
|
||||||
#else
|
|
||||||
error fail
|
|
||||||
#endif
|
|
||||||
]])],
|
|
||||||
[AC_COMPILE_IFELSE(
|
|
||||||
[AC_LANG_SOURCE(
|
|
||||||
[[#if defined __ILP32__ || defined _ILP32
|
|
||||||
int ok;
|
|
||||||
#else
|
|
||||||
error fail
|
|
||||||
#endif
|
|
||||||
]])],
|
|
||||||
[gl_cv_host_cpu_c_abi=arm64-ilp32],
|
|
||||||
[gl_cv_host_cpu_c_abi=arm64])],
|
|
||||||
[# Don't distinguish little-endian and big-endian arm, since they
|
|
||||||
# don't require different machine code for simple operations and
|
|
||||||
# since the user can distinguish them through the preprocessor
|
|
||||||
# defines __ARMEL__ vs. __ARMEB__.
|
|
||||||
# But distinguish arm which passes floating-point arguments and
|
|
||||||
# return values in integer registers (r0, r1, ...) - this is
|
|
||||||
# gcc -mfloat-abi=soft or gcc -mfloat-abi=softfp - from arm which
|
|
||||||
# passes them in float registers (s0, s1, ...) and double registers
|
|
||||||
# (d0, d1, ...) - this is gcc -mfloat-abi=hard. GCC 4.6 or newer
|
|
||||||
# sets the preprocessor defines __ARM_PCS (for the first case) and
|
|
||||||
# __ARM_PCS_VFP (for the second case), but older GCC does not.
|
|
||||||
echo 'double ddd; void func (double dd) { ddd = dd; }' > conftest.c
|
|
||||||
# Look for a reference to the register d0 in the .s file.
|
|
||||||
AC_TRY_COMMAND(${CC-cc} $CFLAGS $CPPFLAGS $gl_c_asm_opt conftest.c) >/dev/null 2>&1
|
|
||||||
if LC_ALL=C grep 'd0,' conftest.$gl_asmext >/dev/null; then
|
|
||||||
gl_cv_host_cpu_c_abi=armhf
|
|
||||||
else
|
|
||||||
gl_cv_host_cpu_c_abi=arm
|
|
||||||
fi
|
|
||||||
rm -fr conftest*
|
|
||||||
])
|
|
||||||
;;
|
|
||||||
|
|
||||||
hppa1.0 | hppa1.1 | hppa2.0* | hppa64 )
|
|
||||||
# On hppa, the C compiler may be generating 32-bit code or 64-bit
|
|
||||||
# code. In the latter case, it defines _LP64 and __LP64__.
|
|
||||||
AC_COMPILE_IFELSE(
|
|
||||||
[AC_LANG_SOURCE(
|
|
||||||
[[#ifdef __LP64__
|
|
||||||
int ok;
|
|
||||||
#else
|
|
||||||
error fail
|
|
||||||
#endif
|
|
||||||
]])],
|
|
||||||
[gl_cv_host_cpu_c_abi=hppa64],
|
|
||||||
[gl_cv_host_cpu_c_abi=hppa])
|
|
||||||
;;
|
|
||||||
|
|
||||||
ia64* )
|
|
||||||
# On ia64 on HP-UX, the C compiler may be generating 64-bit code or
|
|
||||||
# 32-bit code. In the latter case, it defines _ILP32.
|
|
||||||
AC_COMPILE_IFELSE(
|
|
||||||
[AC_LANG_SOURCE(
|
|
||||||
[[#ifdef _ILP32
|
|
||||||
int ok;
|
|
||||||
#else
|
|
||||||
error fail
|
|
||||||
#endif
|
|
||||||
]])],
|
|
||||||
[gl_cv_host_cpu_c_abi=ia64-ilp32],
|
|
||||||
[gl_cv_host_cpu_c_abi=ia64])
|
|
||||||
;;
|
|
||||||
|
|
||||||
mips* )
|
|
||||||
# We should also check for (_MIPS_SZPTR == 64), but gcc keeps this
|
|
||||||
# at 32.
|
|
||||||
AC_COMPILE_IFELSE(
|
|
||||||
[AC_LANG_SOURCE(
|
|
||||||
[[#if defined _MIPS_SZLONG && (_MIPS_SZLONG == 64)
|
|
||||||
int ok;
|
|
||||||
#else
|
|
||||||
error fail
|
|
||||||
#endif
|
|
||||||
]])],
|
|
||||||
[gl_cv_host_cpu_c_abi=mips64],
|
|
||||||
[# In the n32 ABI, _ABIN32 is defined, _ABIO32 is not defined (but
|
|
||||||
# may later get defined by <sgidefs.h>), and _MIPS_SIM == _ABIN32.
|
|
||||||
# In the 32 ABI, _ABIO32 is defined, _ABIN32 is not defined (but
|
|
||||||
# may later get defined by <sgidefs.h>), and _MIPS_SIM == _ABIO32.
|
|
||||||
AC_COMPILE_IFELSE(
|
|
||||||
[AC_LANG_SOURCE(
|
|
||||||
[[#if (_MIPS_SIM == _ABIN32)
|
|
||||||
int ok;
|
|
||||||
#else
|
|
||||||
error fail
|
|
||||||
#endif
|
|
||||||
]])],
|
|
||||||
[gl_cv_host_cpu_c_abi=mipsn32],
|
|
||||||
[gl_cv_host_cpu_c_abi=mips])])
|
|
||||||
;;
|
|
||||||
|
|
||||||
powerpc* )
|
|
||||||
# Different ABIs are in use on AIX vs. Mac OS X vs. Linux,*BSD.
|
|
||||||
# No need to distinguish them here; the caller may distinguish
|
|
||||||
# them based on the OS.
|
|
||||||
# On powerpc64 systems, the C compiler may still be generating
|
|
||||||
# 32-bit code. And on powerpc-ibm-aix systems, the C compiler may
|
|
||||||
# be generating 64-bit code.
|
|
||||||
AC_COMPILE_IFELSE(
|
|
||||||
[AC_LANG_SOURCE(
|
|
||||||
[[#if defined __powerpc64__ || defined __LP64__
|
|
||||||
int ok;
|
|
||||||
#else
|
|
||||||
error fail
|
|
||||||
#endif
|
|
||||||
]])],
|
|
||||||
[# On powerpc64, there are two ABIs on Linux: The AIX compatible
|
|
||||||
# one and the ELFv2 one. The latter defines _CALL_ELF=2.
|
|
||||||
AC_COMPILE_IFELSE(
|
|
||||||
[AC_LANG_SOURCE(
|
|
||||||
[[#if defined _CALL_ELF && _CALL_ELF == 2
|
|
||||||
int ok;
|
|
||||||
#else
|
|
||||||
error fail
|
|
||||||
#endif
|
|
||||||
]])],
|
|
||||||
[gl_cv_host_cpu_c_abi=powerpc64-elfv2],
|
|
||||||
[gl_cv_host_cpu_c_abi=powerpc64])
|
|
||||||
],
|
|
||||||
[gl_cv_host_cpu_c_abi=powerpc])
|
|
||||||
;;
|
|
||||||
|
|
||||||
rs6000 )
|
|
||||||
gl_cv_host_cpu_c_abi=powerpc
|
|
||||||
;;
|
|
||||||
|
|
||||||
riscv32 | riscv64 )
|
|
||||||
# There are 2 architectures (with variants): rv32* and rv64*.
|
|
||||||
AC_COMPILE_IFELSE(
|
|
||||||
[AC_LANG_SOURCE(
|
|
||||||
[[#if __riscv_xlen == 64
|
|
||||||
int ok;
|
|
||||||
#else
|
|
||||||
error fail
|
|
||||||
#endif
|
|
||||||
]])],
|
|
||||||
[cpu=riscv64],
|
|
||||||
[cpu=riscv32])
|
|
||||||
# There are 6 ABIs: ilp32, ilp32f, ilp32d, lp64, lp64f, lp64d.
|
|
||||||
# Size of 'long' and 'void *':
|
|
||||||
AC_COMPILE_IFELSE(
|
|
||||||
[AC_LANG_SOURCE(
|
|
||||||
[[#if defined __LP64__
|
|
||||||
int ok;
|
|
||||||
#else
|
|
||||||
error fail
|
|
||||||
#endif
|
|
||||||
]])],
|
|
||||||
[main_abi=lp64],
|
|
||||||
[main_abi=ilp32])
|
|
||||||
# Float ABIs:
|
|
||||||
# __riscv_float_abi_double:
|
|
||||||
# 'float' and 'double' are passed in floating-point registers.
|
|
||||||
# __riscv_float_abi_single:
|
|
||||||
# 'float' are passed in floating-point registers.
|
|
||||||
# __riscv_float_abi_soft:
|
|
||||||
# No values are passed in floating-point registers.
|
|
||||||
AC_COMPILE_IFELSE(
|
|
||||||
[AC_LANG_SOURCE(
|
|
||||||
[[#if defined __riscv_float_abi_double
|
|
||||||
int ok;
|
|
||||||
#else
|
|
||||||
error fail
|
|
||||||
#endif
|
|
||||||
]])],
|
|
||||||
[float_abi=d],
|
|
||||||
[AC_COMPILE_IFELSE(
|
|
||||||
[AC_LANG_SOURCE(
|
|
||||||
[[#if defined __riscv_float_abi_single
|
|
||||||
int ok;
|
|
||||||
#else
|
|
||||||
error fail
|
|
||||||
#endif
|
|
||||||
]])],
|
|
||||||
[float_abi=f],
|
|
||||||
[float_abi=''])
|
|
||||||
])
|
|
||||||
gl_cv_host_cpu_c_abi="${cpu}-${main_abi}${float_abi}"
|
|
||||||
;;
|
|
||||||
|
|
||||||
s390* )
|
|
||||||
# On s390x, the C compiler may be generating 64-bit (= s390x) code
|
|
||||||
# or 31-bit (= s390) code.
|
|
||||||
AC_COMPILE_IFELSE(
|
|
||||||
[AC_LANG_SOURCE(
|
|
||||||
[[#if defined __LP64__ || defined __s390x__
|
|
||||||
int ok;
|
|
||||||
#else
|
|
||||||
error fail
|
|
||||||
#endif
|
|
||||||
]])],
|
|
||||||
[gl_cv_host_cpu_c_abi=s390x],
|
|
||||||
[gl_cv_host_cpu_c_abi=s390])
|
|
||||||
;;
|
|
||||||
|
|
||||||
sparc | sparc64 )
|
|
||||||
# UltraSPARCs running Linux have `uname -m` = "sparc64", but the
|
|
||||||
# C compiler still generates 32-bit code.
|
|
||||||
AC_COMPILE_IFELSE(
|
|
||||||
[AC_LANG_SOURCE(
|
|
||||||
[[#if defined __sparcv9 || defined __arch64__
|
|
||||||
int ok;
|
|
||||||
#else
|
|
||||||
error fail
|
|
||||||
#endif
|
|
||||||
]])],
|
|
||||||
[gl_cv_host_cpu_c_abi=sparc64],
|
|
||||||
[gl_cv_host_cpu_c_abi=sparc])
|
|
||||||
;;
|
|
||||||
|
|
||||||
*)
|
|
||||||
gl_cv_host_cpu_c_abi="$host_cpu"
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
])
|
|
||||||
|
|
||||||
dnl In most cases, $HOST_CPU and $HOST_CPU_C_ABI are the same.
|
|
||||||
HOST_CPU=`echo "$gl_cv_host_cpu_c_abi" | sed -e 's/-.*//'`
|
|
||||||
HOST_CPU_C_ABI="$gl_cv_host_cpu_c_abi"
|
|
||||||
AC_SUBST([HOST_CPU])
|
|
||||||
AC_SUBST([HOST_CPU_C_ABI])
|
|
||||||
|
|
||||||
# This was
|
|
||||||
# AC_DEFINE_UNQUOTED([__${HOST_CPU}__])
|
|
||||||
# AC_DEFINE_UNQUOTED([__${HOST_CPU_C_ABI}__])
|
|
||||||
# earlier, but KAI C++ 3.2d doesn't like this.
|
|
||||||
sed -e 's/-/_/g' >> confdefs.h <<EOF
|
|
||||||
#ifndef __${HOST_CPU}__
|
|
||||||
#define __${HOST_CPU}__ 1
|
|
||||||
#endif
|
|
||||||
#ifndef __${HOST_CPU_C_ABI}__
|
|
||||||
#define __${HOST_CPU_C_ABI}__ 1
|
|
||||||
#endif
|
|
||||||
EOF
|
|
||||||
AH_TOP([/* CPU and C ABI indicator */
|
|
||||||
#ifndef __i386__
|
|
||||||
#undef __i386__
|
|
||||||
#endif
|
|
||||||
#ifndef __x86_64_x32__
|
|
||||||
#undef __x86_64_x32__
|
|
||||||
#endif
|
|
||||||
#ifndef __x86_64__
|
|
||||||
#undef __x86_64__
|
|
||||||
#endif
|
|
||||||
#ifndef __alpha__
|
|
||||||
#undef __alpha__
|
|
||||||
#endif
|
|
||||||
#ifndef __arm__
|
|
||||||
#undef __arm__
|
|
||||||
#endif
|
|
||||||
#ifndef __armhf__
|
|
||||||
#undef __armhf__
|
|
||||||
#endif
|
|
||||||
#ifndef __arm64_ilp32__
|
|
||||||
#undef __arm64_ilp32__
|
|
||||||
#endif
|
|
||||||
#ifndef __arm64__
|
|
||||||
#undef __arm64__
|
|
||||||
#endif
|
|
||||||
#ifndef __hppa__
|
|
||||||
#undef __hppa__
|
|
||||||
#endif
|
|
||||||
#ifndef __hppa64__
|
|
||||||
#undef __hppa64__
|
|
||||||
#endif
|
|
||||||
#ifndef __ia64_ilp32__
|
|
||||||
#undef __ia64_ilp32__
|
|
||||||
#endif
|
|
||||||
#ifndef __ia64__
|
|
||||||
#undef __ia64__
|
|
||||||
#endif
|
|
||||||
#ifndef __loongarch32__
|
|
||||||
#undef __loongarch32__
|
|
||||||
#endif
|
|
||||||
#ifndef __loongarch64__
|
|
||||||
#undef __loongarch64__
|
|
||||||
#endif
|
|
||||||
#ifndef __m68k__
|
|
||||||
#undef __m68k__
|
|
||||||
#endif
|
|
||||||
#ifndef __mips__
|
|
||||||
#undef __mips__
|
|
||||||
#endif
|
|
||||||
#ifndef __mipsn32__
|
|
||||||
#undef __mipsn32__
|
|
||||||
#endif
|
|
||||||
#ifndef __mips64__
|
|
||||||
#undef __mips64__
|
|
||||||
#endif
|
|
||||||
#ifndef __powerpc__
|
|
||||||
#undef __powerpc__
|
|
||||||
#endif
|
|
||||||
#ifndef __powerpc64__
|
|
||||||
#undef __powerpc64__
|
|
||||||
#endif
|
|
||||||
#ifndef __powerpc64_elfv2__
|
|
||||||
#undef __powerpc64_elfv2__
|
|
||||||
#endif
|
|
||||||
#ifndef __riscv32__
|
|
||||||
#undef __riscv32__
|
|
||||||
#endif
|
|
||||||
#ifndef __riscv64__
|
|
||||||
#undef __riscv64__
|
|
||||||
#endif
|
|
||||||
#ifndef __riscv32_ilp32__
|
|
||||||
#undef __riscv32_ilp32__
|
|
||||||
#endif
|
|
||||||
#ifndef __riscv32_ilp32f__
|
|
||||||
#undef __riscv32_ilp32f__
|
|
||||||
#endif
|
|
||||||
#ifndef __riscv32_ilp32d__
|
|
||||||
#undef __riscv32_ilp32d__
|
|
||||||
#endif
|
|
||||||
#ifndef __riscv64_ilp32__
|
|
||||||
#undef __riscv64_ilp32__
|
|
||||||
#endif
|
|
||||||
#ifndef __riscv64_ilp32f__
|
|
||||||
#undef __riscv64_ilp32f__
|
|
||||||
#endif
|
|
||||||
#ifndef __riscv64_ilp32d__
|
|
||||||
#undef __riscv64_ilp32d__
|
|
||||||
#endif
|
|
||||||
#ifndef __riscv64_lp64__
|
|
||||||
#undef __riscv64_lp64__
|
|
||||||
#endif
|
|
||||||
#ifndef __riscv64_lp64f__
|
|
||||||
#undef __riscv64_lp64f__
|
|
||||||
#endif
|
|
||||||
#ifndef __riscv64_lp64d__
|
|
||||||
#undef __riscv64_lp64d__
|
|
||||||
#endif
|
|
||||||
#ifndef __s390__
|
|
||||||
#undef __s390__
|
|
||||||
#endif
|
|
||||||
#ifndef __s390x__
|
|
||||||
#undef __s390x__
|
|
||||||
#endif
|
|
||||||
#ifndef __sh__
|
|
||||||
#undef __sh__
|
|
||||||
#endif
|
|
||||||
#ifndef __sparc__
|
|
||||||
#undef __sparc__
|
|
||||||
#endif
|
|
||||||
#ifndef __sparc64__
|
|
||||||
#undef __sparc64__
|
|
||||||
#endif
|
|
||||||
])
|
|
||||||
|
|
||||||
])
|
|
||||||
|
|
||||||
|
|
||||||
dnl Sets the HOST_CPU_C_ABI_32BIT variable to 'yes' if the C language ABI
|
|
||||||
dnl (application binary interface) is a 32-bit one, to 'no' if it is a 64-bit
|
|
||||||
dnl one.
|
|
||||||
dnl This is a simplified variant of gl_HOST_CPU_C_ABI.
|
|
||||||
AC_DEFUN([gl_HOST_CPU_C_ABI_32BIT],
|
|
||||||
[
|
|
||||||
AC_REQUIRE([AC_CANONICAL_HOST])
|
|
||||||
AC_CACHE_CHECK([32-bit host C ABI], [gl_cv_host_cpu_c_abi_32bit],
|
|
||||||
[case "$host_cpu" in
|
|
||||||
|
|
||||||
# CPUs that only support a 32-bit ABI.
|
|
||||||
arc \
|
|
||||||
| bfin \
|
|
||||||
| cris* \
|
|
||||||
| csky \
|
|
||||||
| epiphany \
|
|
||||||
| ft32 \
|
|
||||||
| h8300 \
|
|
||||||
| m68k \
|
|
||||||
| microblaze | microblazeel \
|
|
||||||
| nds32 | nds32le | nds32be \
|
|
||||||
| nios2 | nios2eb | nios2el \
|
|
||||||
| or1k* \
|
|
||||||
| or32 \
|
|
||||||
| sh | sh[1234] | sh[1234]e[lb] \
|
|
||||||
| tic6x \
|
|
||||||
| xtensa* )
|
|
||||||
gl_cv_host_cpu_c_abi_32bit=yes
|
|
||||||
;;
|
|
||||||
|
|
||||||
# CPUs that only support a 64-bit ABI.
|
|
||||||
changequote(,)dnl
|
|
||||||
alpha | alphaev[4-8] | alphaev56 | alphapca5[67] | alphaev6[78] \
|
|
||||||
| mmix )
|
|
||||||
changequote([,])dnl
|
|
||||||
gl_cv_host_cpu_c_abi_32bit=no
|
|
||||||
;;
|
|
||||||
|
|
||||||
*)
|
|
||||||
if test -n "$gl_cv_host_cpu_c_abi"; then
|
|
||||||
dnl gl_HOST_CPU_C_ABI has already been run. Use its result.
|
|
||||||
case "$gl_cv_host_cpu_c_abi" in
|
|
||||||
i386 | x86_64-x32 | arm | armhf | arm64-ilp32 | hppa | ia64-ilp32 | loongarch32 | mips | mipsn32 | powerpc | riscv*-ilp32* | s390 | sparc)
|
|
||||||
gl_cv_host_cpu_c_abi_32bit=yes ;;
|
|
||||||
x86_64 | alpha | arm64 | aarch64c | hppa64 | ia64 | loongarch64 | mips64 | powerpc64 | powerpc64-elfv2 | riscv*-lp64* | s390x | sparc64 )
|
|
||||||
gl_cv_host_cpu_c_abi_32bit=no ;;
|
|
||||||
*)
|
|
||||||
gl_cv_host_cpu_c_abi_32bit=unknown ;;
|
|
||||||
esac
|
|
||||||
else
|
|
||||||
gl_cv_host_cpu_c_abi_32bit=unknown
|
|
||||||
fi
|
|
||||||
if test $gl_cv_host_cpu_c_abi_32bit = unknown; then
|
|
||||||
AC_COMPILE_IFELSE(
|
|
||||||
[AC_LANG_SOURCE(
|
|
||||||
[[int test_pointer_size[sizeof (void *) - 5];
|
|
||||||
]])],
|
|
||||||
[gl_cv_host_cpu_c_abi_32bit=no],
|
|
||||||
[gl_cv_host_cpu_c_abi_32bit=yes])
|
|
||||||
fi
|
|
||||||
;;
|
|
||||||
esac
|
|
||||||
])
|
|
||||||
|
|
||||||
HOST_CPU_C_ABI_32BIT="$gl_cv_host_cpu_c_abi_32bit"
|
|
||||||
])
|
|
@@ -1,334 +0,0 @@
-# lib-prefix.m4
-# serial 23
-dnl Copyright (C) 2001-2005, 2008-2025 Free Software Foundation, Inc.
-dnl This file is free software; the Free Software Foundation
-dnl gives unlimited permission to copy and/or distribute it,
-dnl with or without modifications, as long as this notice is preserved.
-dnl This file is offered as-is, without any warranty.
-
-dnl From Bruno Haible.
-
-dnl AC_LIB_PREFIX adds to the CPPFLAGS and LDFLAGS the flags that are needed
-dnl to access previously installed libraries. The basic assumption is that
-dnl a user will want packages to use other packages he previously installed
-dnl with the same --prefix option.
-dnl This macro is not needed if only AC_LIB_LINKFLAGS is used to locate
-dnl libraries, but is otherwise very convenient.
AC_DEFUN([AC_LIB_PREFIX],
|
|
||||||
[
|
|
||||||
AC_BEFORE([$0], [AC_LIB_LINKFLAGS])
|
|
||||||
AC_REQUIRE([AC_PROG_CC])
|
|
||||||
AC_REQUIRE([AC_CANONICAL_HOST])
|
|
||||||
AC_REQUIRE([AC_LIB_PREPARE_MULTILIB])
|
|
||||||
AC_REQUIRE([AC_LIB_PREPARE_PREFIX])
|
|
||||||
dnl By default, look in $includedir and $libdir.
|
|
||||||
use_additional=yes
|
|
||||||
AC_LIB_WITH_FINAL_PREFIX([
|
|
||||||
eval additional_includedir=\"$includedir\"
|
|
||||||
eval additional_libdir=\"$libdir\"
|
|
||||||
])
|
|
||||||
AC_ARG_WITH([lib-prefix],
|
|
||||||
[[ --with-lib-prefix[=DIR] search for libraries in DIR/include and DIR/lib
|
|
||||||
--without-lib-prefix don't search for libraries in includedir and libdir]],
|
|
||||||
[
|
|
||||||
if test "X$withval" = "Xno"; then
|
|
||||||
use_additional=no
|
|
||||||
else
|
|
||||||
if test "X$withval" = "X"; then
|
|
||||||
AC_LIB_WITH_FINAL_PREFIX([
|
|
||||||
eval additional_includedir=\"$includedir\"
|
|
||||||
eval additional_libdir=\"$libdir\"
|
|
||||||
])
|
|
||||||
else
|
|
||||||
additional_includedir="$withval/include"
|
|
||||||
additional_libdir="$withval/$acl_libdirstem"
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
])
|
|
||||||
if test $use_additional = yes; then
|
|
||||||
dnl Potentially add $additional_includedir to $CPPFLAGS.
|
|
||||||
dnl But don't add it
|
|
||||||
dnl 1. if it's the standard /usr/include,
|
|
||||||
dnl 2. if it's already present in $CPPFLAGS,
|
|
||||||
dnl 3. if it's /usr/local/include and we are using GCC on Linux,
|
|
||||||
dnl 4. if it doesn't exist as a directory.
|
|
||||||
if test "X$additional_includedir" != "X/usr/include"; then
|
|
||||||
haveit=
|
|
||||||
for x in $CPPFLAGS; do
|
|
||||||
AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"])
|
|
||||||
if test "X$x" = "X-I$additional_includedir"; then
|
|
||||||
haveit=yes
|
|
||||||
break
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
if test -z "$haveit"; then
|
|
||||||
if test "X$additional_includedir" = "X/usr/local/include"; then
|
|
||||||
if test -n "$GCC"; then
|
|
||||||
case $host_os in
|
|
||||||
linux* | gnu* | k*bsd*-gnu) haveit=yes;;
|
|
||||||
esac
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
if test -z "$haveit"; then
|
|
||||||
if test -d "$additional_includedir"; then
|
|
||||||
dnl Really add $additional_includedir to $CPPFLAGS.
|
|
||||||
CPPFLAGS="${CPPFLAGS}${CPPFLAGS:+ }-I$additional_includedir"
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
dnl Potentially add $additional_libdir to $LDFLAGS.
|
|
||||||
dnl But don't add it
|
|
||||||
dnl 1. if it's the standard /usr/lib,
|
|
||||||
dnl 2. if it's already present in $LDFLAGS,
|
|
||||||
dnl 3. if it's /usr/local/lib and we are using GCC on Linux,
|
|
||||||
dnl 4. if it doesn't exist as a directory.
|
|
||||||
if test "X$additional_libdir" != "X/usr/$acl_libdirstem"; then
|
|
||||||
haveit=
|
|
||||||
for x in $LDFLAGS; do
|
|
||||||
AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"])
|
|
||||||
if test "X$x" = "X-L$additional_libdir"; then
|
|
||||||
haveit=yes
|
|
||||||
break
|
|
||||||
fi
|
|
||||||
done
|
|
||||||
if test -z "$haveit"; then
|
|
||||||
if test "X$additional_libdir" = "X/usr/local/$acl_libdirstem"; then
|
|
||||||
if test -n "$GCC"; then
|
|
||||||
case $host_os in
|
|
||||||
linux*) haveit=yes;;
|
|
||||||
esac
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
if test -z "$haveit"; then
|
|
||||||
if test -d "$additional_libdir"; then
|
|
||||||
dnl Really add $additional_libdir to $LDFLAGS.
|
|
||||||
LDFLAGS="${LDFLAGS}${LDFLAGS:+ }-L$additional_libdir"
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
fi
|
|
||||||
])
|
|
||||||
|
|
||||||
dnl AC_LIB_PREPARE_PREFIX creates variables acl_final_prefix,
|
|
||||||
dnl acl_final_exec_prefix, containing the values to which $prefix and
|
|
||||||
dnl $exec_prefix will expand at the end of the configure script.
|
|
||||||
AC_DEFUN([AC_LIB_PREPARE_PREFIX],
|
|
||||||
[
|
|
||||||
dnl Unfortunately, prefix and exec_prefix get only finally determined
|
|
||||||
dnl at the end of configure.
|
|
||||||
if test "X$prefix" = "XNONE"; then
|
|
||||||
acl_final_prefix="$ac_default_prefix"
|
|
||||||
else
|
|
||||||
acl_final_prefix="$prefix"
|
|
||||||
fi
|
|
||||||
if test "X$exec_prefix" = "XNONE"; then
|
|
||||||
acl_final_exec_prefix='${prefix}'
|
|
||||||
else
|
|
||||||
acl_final_exec_prefix="$exec_prefix"
|
|
||||||
fi
|
|
||||||
acl_saved_prefix="$prefix"
|
|
||||||
prefix="$acl_final_prefix"
|
|
||||||
eval acl_final_exec_prefix=\"$acl_final_exec_prefix\"
|
|
||||||
prefix="$acl_saved_prefix"
|
|
||||||
])
|
|
||||||
|
|
||||||
dnl AC_LIB_WITH_FINAL_PREFIX([statement]) evaluates statement, with the
|
|
||||||
dnl variables prefix and exec_prefix bound to the values they will have
|
|
||||||
dnl at the end of the configure script.
|
|
||||||
AC_DEFUN([AC_LIB_WITH_FINAL_PREFIX],
|
|
||||||
[
|
|
||||||
acl_saved_prefix="$prefix"
|
|
||||||
prefix="$acl_final_prefix"
|
|
||||||
acl_saved_exec_prefix="$exec_prefix"
|
|
||||||
exec_prefix="$acl_final_exec_prefix"
|
|
||||||
$1
|
|
||||||
exec_prefix="$acl_saved_exec_prefix"
|
|
||||||
prefix="$acl_saved_prefix"
|
|
||||||
])
|
|
||||||
|
|
||||||
dnl AC_LIB_PREPARE_MULTILIB creates
|
|
||||||
dnl - a function acl_is_expected_elfclass, that tests whether standard input
|
|
||||||
dnl; has a 32-bit or 64-bit ELF header, depending on the host CPU ABI,
|
|
||||||
dnl - 3 variables acl_libdirstem, acl_libdirstem2, acl_libdirstem3, containing
|
|
||||||
dnl the basename of the libdir to try in turn, either "lib" or "lib64" or
|
|
||||||
dnl "lib/64" or "lib32" or "lib/sparcv9" or "lib/amd64" or similar.
|
|
||||||
AC_DEFUN([AC_LIB_PREPARE_MULTILIB],
|
|
||||||
[
|
|
||||||
dnl There is no formal standard regarding lib, lib32, and lib64.
|
|
||||||
dnl On most glibc systems, the current practice is that on a system supporting
|
|
||||||
dnl 32-bit and 64-bit instruction sets or ABIs, 64-bit libraries go under
|
|
||||||
dnl $prefix/lib64 and 32-bit libraries go under $prefix/lib. However, on
|
|
||||||
dnl Arch Linux based distributions, it's the opposite: 32-bit libraries go
|
|
||||||
dnl under $prefix/lib32 and 64-bit libraries go under $prefix/lib.
dnl We determine the compiler's default mode by looking at the compiler's
dnl library search path. If at least one of its elements ends in /lib64 or
dnl points to a directory whose absolute pathname ends in /lib64, we use that
dnl for 64-bit ABIs. Similarly for 32-bit ABIs. Otherwise we use the default,
dnl namely "lib".
dnl On Solaris systems, the current practice is that on a system supporting
dnl 32-bit and 64-bit instruction sets or ABIs, 64-bit libraries go under
dnl $prefix/lib/64 (which is a symlink to either $prefix/lib/sparcv9 or
dnl $prefix/lib/amd64) and 32-bit libraries go under $prefix/lib.
  AC_REQUIRE([AC_CANONICAL_HOST])
  AC_REQUIRE([gl_HOST_CPU_C_ABI_32BIT])

  AC_CACHE_CHECK([for ELF binary format], [gl_cv_elf],
    [AC_EGREP_CPP([Extensible Linking Format],
      [#if defined __ELF__ || (defined __linux__ && (defined __EDG__ || defined __SUNPRO_C))
        Extensible Linking Format
       #endif
      ],
      [gl_cv_elf=yes],
      [gl_cv_elf=no])
    ])
  if test $gl_cv_elf = yes; then
    # Extract the ELF class of a file (5th byte) in decimal.
    # Cf. https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
    if od -A x < /dev/null >/dev/null 2>/dev/null; then
      # Use POSIX od.
      func_elfclass ()
      {
        od -A n -t d1 -j 4 -N 1
      }
    else
      # Use BSD hexdump.
      func_elfclass ()
      {
        dd bs=1 count=1 skip=4 2>/dev/null | hexdump -e '1/1 "%3d "'
        echo
      }
    fi
    # Use 'expr', not 'test', to compare the values of func_elfclass, because on
    # Solaris 11 OpenIndiana and Solaris 11 OmniOS, the result is 001 or 002,
    # not 1 or 2.
changequote(,)dnl
    case $HOST_CPU_C_ABI_32BIT in
      yes)
        # 32-bit ABI.
        acl_is_expected_elfclass ()
        {
          expr "`func_elfclass | sed -e 's/[ ]//g'`" = 1 > /dev/null
        }
        ;;
      no)
        # 64-bit ABI.
        acl_is_expected_elfclass ()
        {
          expr "`func_elfclass | sed -e 's/[ ]//g'`" = 2 > /dev/null
        }
        ;;
      *)
        # Unknown.
        acl_is_expected_elfclass ()
        {
          :
        }
        ;;
    esac
changequote([,])dnl
  else
    acl_is_expected_elfclass ()
    {
      :
    }
  fi

  dnl Allow the user to override the result by setting acl_cv_libdirstems.
  AC_CACHE_CHECK([for the common suffixes of directories in the library search path],
    [acl_cv_libdirstems],
    [dnl Try 'lib' first, because that's the default for libdir in GNU, see
     dnl <https://www.gnu.org/prep/standards/html_node/Directory-Variables.html>.
     acl_libdirstem=lib
     acl_libdirstem2=
     acl_libdirstem3=
     case "$host_os" in
       solaris*)
         dnl See Solaris 10 Software Developer Collection > Solaris 64-bit Developer's Guide > The Development Environment
         dnl <https://docs.oracle.com/cd/E19253-01/816-5138/dev-env/index.html>.
         dnl "Portable Makefiles should refer to any library directories using the 64 symbolic link."
         dnl But we want to recognize the sparcv9 or amd64 subdirectory also if the
         dnl symlink is missing, so we set acl_libdirstem2 too.
         if test $HOST_CPU_C_ABI_32BIT = no; then
           acl_libdirstem2=lib/64
           case "$host_cpu" in
             sparc*)        acl_libdirstem3=lib/sparcv9 ;;
             i*86 | x86_64) acl_libdirstem3=lib/amd64 ;;
           esac
         fi
         ;;
       netbsd*)
         dnl On NetBSD/sparc64, there is a 'sparc' subdirectory that contains
         dnl 32-bit libraries.
         if test $HOST_CPU_C_ABI_32BIT != no; then
           case "$host_cpu" in
             sparc*) acl_libdirstem2=lib/sparc ;;
           esac
         fi
         ;;
       *)
         dnl If $CC generates code for a 32-bit ABI, the libraries are
         dnl surely under $prefix/lib or $prefix/lib32, not $prefix/lib64.
         dnl Similarly, if $CC generates code for a 64-bit ABI, the libraries
         dnl are surely under $prefix/lib or $prefix/lib64, not $prefix/lib32.
         dnl Find the compiler's search path. However, non-system compilers
         dnl sometimes have odd library search paths. But we can't simply invoke
         dnl '/usr/bin/gcc -print-search-dirs' because that would not take into
         dnl account the -m32/-m31 or -m64 options from the $CC or $CFLAGS.
         searchpath=`(LC_ALL=C $CC $CPPFLAGS $CFLAGS -print-search-dirs) 2>/dev/null \
                     | sed -n -e 's,^libraries: ,,p' | sed -e 's,^=,,'`
         if test $HOST_CPU_C_ABI_32BIT != no; then
           # 32-bit or unknown ABI.
           if test -d /usr/lib32; then
             acl_libdirstem2=lib32
           fi
         fi
         if test $HOST_CPU_C_ABI_32BIT != yes; then
           # 64-bit or unknown ABI.
           if test -d /usr/lib64; then
             acl_libdirstem3=lib64
           fi
         fi
         if test -n "$searchpath"; then
           acl_saved_IFS="${IFS= }"; IFS=":"
           for searchdir in $searchpath; do
             if test -d "$searchdir"; then
               case "$searchdir" in
                 */lib32/ | */lib32 ) acl_libdirstem2=lib32 ;;
                 */lib64/ | */lib64 ) acl_libdirstem3=lib64 ;;
                 */../ | */.. )
                   # Better ignore directories of this form. They are misleading.
                   ;;
                 *) searchdir=`cd "$searchdir" && pwd`
                    case "$searchdir" in
                      */lib32 ) acl_libdirstem2=lib32 ;;
                      */lib64 ) acl_libdirstem3=lib64 ;;
                    esac ;;
               esac
             fi
           done
           IFS="$acl_saved_IFS"
           if test $HOST_CPU_C_ABI_32BIT = yes; then
             # 32-bit ABI.
             acl_libdirstem3=
           fi
           if test $HOST_CPU_C_ABI_32BIT = no; then
             # 64-bit ABI.
             acl_libdirstem2=
           fi
         fi
         ;;
     esac
     test -n "$acl_libdirstem2" || acl_libdirstem2="$acl_libdirstem"
     test -n "$acl_libdirstem3" || acl_libdirstem3="$acl_libdirstem"
     acl_cv_libdirstems="$acl_libdirstem,$acl_libdirstem2,$acl_libdirstem3"
    ])
  dnl Decompose acl_cv_libdirstems into acl_libdirstem, acl_libdirstem2, and
  dnl acl_libdirstem3.
changequote(,)dnl
  acl_libdirstem=`echo "$acl_cv_libdirstems" | sed -e 's/,.*//'`
  acl_libdirstem2=`echo "$acl_cv_libdirstems" | sed -e 's/^[^,]*,//' -e 's/,.*//'`
  acl_libdirstem3=`echo "$acl_cv_libdirstems" | sed -e 's/^[^,]*,[^,]*,//' -e 's/,.*//'`
changequote([,])dnl
])
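Note: the func_elfclass helper above simply reads the fifth byte of an ELF header (EI_CLASS, 1 = 32-bit, 2 = 64-bit). A minimal stand-alone sketch of the same check, assuming a POSIX od and using /bin/sh as an example target file (both are illustrative assumptions, not part of the macro):

#!/bin/sh
# Print the EI_CLASS byte (offset 4) of an ELF file in decimal,
# the same check the od-based branch of func_elfclass performs.
file=${1:-/bin/sh}   # hypothetical default, just for this example
elfclass=$(od -A n -t d1 -j 4 -N 1 "$file" | sed -e 's/ //g')
# 'expr' rather than 'test' so that values like 002 still compare equal to 2,
# as explained in the comment in the macro above.
if expr "$elfclass" = 2 > /dev/null; then
    echo "$file: 64-bit ELF"
elif expr "$elfclass" = 1 > /dev/null; then
    echo "$file: 32-bit ELF"
else
    echo "$file: not an ELF file or unknown class '$elfclass'"
fi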
scripts/.gitignore
@@ -17,4 +17,3 @@ thread_analysis
 tls_test
 timeattack
 os_info
-nist_kem_kat
@@ -7,7 +7,7 @@ AM_CPPFLAGS = \

 noinst_PROGRAMS = bin2array bin2sql id2sql key2keyid keyid2sql oid2der \
     thread_analysis dh_speed pubkey_speed crypt_burn hash_burn fetch \
-    dnssec malloc_speed aes-test settings-test timeattack nist_kem_kat
+    dnssec malloc_speed aes-test settings-test timeattack

 if USE_TLS
 noinst_PROGRAMS += tls_test
@@ -31,7 +31,6 @@ malloc_speed_SOURCES = malloc_speed.c
 fetch_SOURCES = fetch.c
 dnssec_SOURCES = dnssec.c
 timeattack_SOURCES = timeattack.c
-nist_kem_kat_SOURCES = nist_kem_kat.c

 id2sql_LDADD = $(top_builddir)/src/libstrongswan/libstrongswan.la
 key2keyid_LDADD = $(top_builddir)/src/libstrongswan/libstrongswan.la
@@ -47,7 +46,6 @@ dnssec_LDADD = $(top_builddir)/src/libstrongswan/libstrongswan.la
 aes_test_LDADD = $(top_builddir)/src/libstrongswan/libstrongswan.la
 settings_test_LDADD = $(top_builddir)/src/libstrongswan/libstrongswan.la
 timeattack_LDADD = $(top_builddir)/src/libstrongswan/libstrongswan.la $(RTLIB)
-nist_kem_kat_LDADD = $(top_builddir)/src/libstrongswan/libstrongswan.la

 if USE_IMCV
 AM_CPPFLAGS += -I$(top_srcdir)/src/libimcv
@@ -1,5 +1,5 @@
 /*
- * Copyright (C) 2023-2024 Tobias Brunner
+ * Copyright (C) 2023 Tobias Brunner
  * Copyright (C) 2009 Martin Willi
  *
  * Copyright (C) secunet Security Networks AG
@@ -56,14 +56,13 @@ static void run_test(key_exchange_method_t method, int rounds)
             method);
         return;
     }
+    assert(r[0]->get_public_key(r[0], &rpublic[0]));
     for (round = 1; round < rounds; round++)
     {
         r[round] = lib->crypto->create_ke(lib->crypto, method);
+        assert(r[round]->get_public_key(r[round], &rpublic[round]));
     }

-    /* make sure to use the method call order documented in the
-     * key_exchange_t header file */
-
     printf("%N:\t", key_exchange_method_names, method);

     start_timing(&timing);
@@ -74,14 +73,12 @@ static void run_test(key_exchange_method_t method, int rounds)
     }
     printf("A = g^a/s: %8.1f", rounds / end_timing(&timing));

-    start_timing(&timing);
     for (round = 0; round < rounds; round++)
     {
         assert(r[round]->set_public_key(r[round], lpublic[round]));
-        assert(r[round]->get_public_key(r[round], &rpublic[round]));
         assert(r[round]->get_shared_secret(r[round], &rsecret[round]));
+        chunk_free(&lpublic[round]);
     }
-    printf(" | S = A^b/s: %8.1f", rounds / end_timing(&timing));

     start_timing(&timing);
     for (round = 0; round < rounds; round++)
@@ -96,7 +93,6 @@ static void run_test(key_exchange_method_t method, int rounds)
         assert(chunk_equals(rsecret[round], lsecret[round]));
         chunk_free(&lsecret[round]);
         chunk_free(&rsecret[round]);
-        chunk_free(&lpublic[round]);
         chunk_free(&rpublic[round]);
         l[round]->destroy(l[round]);
         r[round]->destroy(r[round]);
@@ -1,189 +0,0 @@
/*
 * Copyright (C) 2019-2020 Andreas Steffen
 *
 * Copyright (C) secunet Security Networks AG
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License as published by the
 * Free Software Foundation; either version 2 of the License, or (at your
 * option) any later version. See <http://www.fsf.org/copyleft/gpl.txt>.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
 * for more details.
 */

#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <getopt.h>
#include <errno.h>

#include <library.h>

static void usage(FILE *out, char *name)
{
    fprintf(out, "Convert NIST KEM KAT file into struct\n");
    fprintf(out, "%s [OPTIONS]\n\n", name);
    fprintf(out, "Options:\n");
    fprintf(out, "  -h, --help       print this help.\n");
    fprintf(out, "  -m, --method     KEM method.\n");
    fprintf(out, "  -c, --count      number of structs (default 4).\n");
    fprintf(out, "  -i, --in=FILE    request file (default STDIN).\n");
    fprintf(out, "  -o, --out=FILE   response file (default STDOUT).\n");
    fprintf(out, "\n");
}

int main(int argc, char *argv[])
{
    FILE *in = stdin;
    FILE *out = stdout;
    char line[90000], *method = "", *pos, *eol, *param, *value;
    size_t param_len, value_len;
    int count = 4, n;

    library_init(NULL, "nist-kem-kat");
    atexit(library_deinit);

    while (true)
    {
        struct option long_opts[] = {
            {"help", no_argument, NULL, 'h' },
            {"method", required_argument, NULL, 'm' },
            {"count", required_argument, NULL, 'c' },
            {"in", required_argument, NULL, 'i' },
            {"out", required_argument, NULL, 'o' },
            {0,0,0,0 },
        };
        switch (getopt_long(argc, argv, "h:m:c:i:o:", long_opts, NULL))
        {
            case EOF:
                break;
            case 'h':
                usage(stdout, argv[0]);
                return 0;
            case 'm':
                method = optarg;
                continue;
            case 'c':
                count = atoi(optarg);
                continue;
            case 'i':
                in = fopen(optarg, "r");
                if (!in)
                {
                    fprintf(stderr, "failed to open '%s': %s\n", optarg,
                            strerror(errno));
                    usage(stderr, argv[0]);
                    return 1;
                }
                continue;
            case 'o':
                out = fopen(optarg, "w");
                if (!out)
                {
                    fprintf(stderr, "failed to open '%s': %s\n", optarg,
                            strerror(errno));
                    usage(stderr, argv[0]);
                    return 1;
                }
                continue;
            default:
                usage(stderr, argv[0]);
                return 1;
        }
        break;
    }

    while (fgets(line, sizeof(line), in))
    {
        pos = strchr(line, '=');
        if (!pos)
        {
            continue;
        }

        /*remove preceding whitespace from value */
        value = pos + 1;
        eol = strchr(value, '\n');
        if (!eol)
        {
            fprintf(stderr, "eol not found\n");
            break;
        }
        value_len = eol - value;
        while (value_len && *value == ' ')
        {
            value++;
            value_len--;
        }

        /* remove trailing whitespace from param */
        param = line;
        param_len = pos - line;
        while (param_len && *(--pos) == ' ')
        {
            param_len--;
        }
        param[param_len] = '\0';

        if (streq(param, "sk"))
        {
            continue;
        }

        if (streq(param, "count"))
        {
            if (count == 0)
            {
                break;
            }
            fprintf(out, "/** count = %.*s */\n", (int)value_len, value);
            fprintf(out, "{\n");
            fprintf(out, "\t.method = %s,\n", method);
            count--;
        }
        else
        {
            fprintf(out, "\t.%s = chunk_from_chars(", param);
            n = 0;

            while (value_len > 1)
            {
                if (n > 0)
                {
                    fprintf(out, ",");
                    if (n % 100 == 0)
                    {
                        fprintf(out, " /* %d */\n", n);
                    }
                }
                if (n % 10 == 0)
                {
                    fprintf(out, "\n\t\t");
                }
                fprintf(out, "0x%.2s", value);
                value += 2;
                value_len -= 2;
                n++;
            }
            fprintf(out, "),\n");
            if (streq(param, "ss"))
            {
                fprintf(out, "},\n");
            }
        }
    }

    if (in != stdin)
    {
        fclose(in);
    }
    if (out != stdout)
    {
        fclose(out);
    }
    return 0;
}

scripts/test.sh
@@ -4,7 +4,7 @@
 build_botan()
 {
     # same revision used in the build recipe of the testing environment
-    BOTAN_REV=3.7.1
+    BOTAN_REV=3.3.0
     BOTAN_DIR=$DEPS_BUILD_DIR/botan

     if test -d "$BOTAN_DIR"; then
@@ -28,8 +28,8 @@ build_botan()
     git clone https://github.com/randombit/botan.git $BOTAN_DIR &&
     cd $BOTAN_DIR &&
     git checkout -qf $BOTAN_REV &&
-    ./configure.py --amalgamation $BOTAN_CONFIG &&
+    python ./configure.py --amalgamation $BOTAN_CONFIG &&
-    make -j$(nproc) libs >/dev/null &&
+    make -j4 libs >/dev/null &&
     sudo make install >/dev/null &&
     sudo ldconfig || exit $?
     cd -
@@ -37,7 +37,7 @@

 build_wolfssl()
 {
-    WOLFSSL_REV=v5.8.2-stable
+    WOLFSSL_REV=v5.6.4-stable
     WOLFSSL_DIR=$DEPS_BUILD_DIR/wolfssl

     if test -d "$WOLFSSL_DIR"; then
@@ -47,22 +47,21 @@ build_wolfssl()
     echo "$ build_wolfssl()"

     WOLFSSL_CFLAGS="-DWOLFSSL_PUBLIC_MP -DWOLFSSL_DES_ECB -DHAVE_AES_ECB \
-        -DHAVE_ECC_BRAINPOOL -DWOLFSSL_MIN_AUTH_TAG_SZ=8 \
+        -DHAVE_ECC_BRAINPOOL -DWOLFSSL_MIN_AUTH_TAG_SZ=8"
-        -DRSA_MIN_SIZE=1024"
     WOLFSSL_CONFIG="--prefix=$DEPS_PREFIX
         --disable-crypttests --disable-examples
         --enable-aesccm --enable-aesctr --enable-camellia
         --enable-curve25519 --enable-curve448 --enable-des3
         --enable-ecccustcurves --enable-ed25519 --enable-ed448
-        --enable-keygen --enable-mlkem --with-max-rsa-bits=8192
+        --enable-keygen --with-max-rsa-bits=8192 --enable-md4
-        --enable-md4 --enable-rsapss --enable-sha3 --enable-shake256"
+        --enable-rsapss --enable-sha3 --enable-shake256"

     git clone https://github.com/wolfSSL/wolfssl.git $WOLFSSL_DIR &&
     cd $WOLFSSL_DIR &&
     git checkout -qf $WOLFSSL_REV &&
     ./autogen.sh &&
     ./configure C_EXTRA_FLAGS="$WOLFSSL_CFLAGS" $WOLFSSL_CONFIG &&
-    make -j$(nproc) >/dev/null &&
+    make -j4 >/dev/null &&
     sudo make install >/dev/null &&
     sudo ldconfig || exit $?
     cd -
@@ -70,7 +69,7 @@

 build_tss2()
 {
-    TSS2_REV=3.2.3
+    TSS2_REV=3.2.2
     TSS2_PKG=tpm2-tss-$TSS2_REV
     TSS2_DIR=$DEPS_BUILD_DIR/$TSS2_PKG
     TSS2_SRC=https://github.com/tpm2-software/tpm2-tss/releases/download/$TSS2_REV/$TSS2_PKG.tar.gz
@@ -84,7 +83,7 @@ build_tss2()
     curl -L $TSS2_SRC | tar xz -C $DEPS_BUILD_DIR &&
     cd $TSS2_DIR &&
     ./configure --prefix=$DEPS_PREFIX --disable-doxygen-doc &&
-    make -j$(nproc) >/dev/null &&
+    make -j4 >/dev/null &&
     sudo make install >/dev/null &&
     sudo ldconfig || exit $?
     cd -
@@ -92,30 +91,26 @@

 build_openssl()
 {
-    SSL_REV=openssl-3.5.1
+    SSL_REV=3.1.1
-    SSL_DIR=$DEPS_BUILD_DIR/openssl
+    SSL_PKG=openssl-$SSL_REV
+    SSL_DIR=$DEPS_BUILD_DIR/$SSL_PKG
+    SSL_SRC=https://www.openssl.org/source/$SSL_PKG.tar.gz
     SSL_INS=$DEPS_PREFIX/ssl
-    SSL_OPT="-d shared no-dtls no-ssl3 no-zlib no-idea no-psk
+    SSL_OPT="-d shared no-dtls no-ssl3 no-zlib no-idea no-psk no-srp
         no-tests enable-rfc3779 enable-ec_nistp_64_gcc_128"

     if test -d "$SSL_DIR"; then
         return
     fi

+    # insist on compiling with gcc and debug information as symbols are otherwise not found
     if test "$LEAK_DETECTIVE" = "yes"; then
-        # insist on compiling with gcc and debug information as symbols are
+        SSL_OPT="$SSL_OPT CC=gcc -d"
-        # otherwise not found, but we can disable SRP (see below)
-        SSL_OPT="$SSL_OPT no-srp CC=gcc -d"
-    elif test "$CC" != "clang"; then
-        # when using ASan with clang, llvm-symbolizer is used to resolve symbols
-        # and this tool links libcurl, which in turn requires SRP, so we can
-        # only disable it when not building with clang
-        SSL_OPT="$SSL_OPT no-srp"
     fi

     echo "$ build_openssl()"

-    git clone https://github.com/openssl/openssl.git --depth 1 -b $SSL_REV $SSL_DIR || exit $?
+    curl -L $SSL_SRC | tar xz -C $DEPS_BUILD_DIR || exit $?

     if [ "$TEST" = "android" ]; then
         OPENSSL_SRC=${SSL_DIR} \
@@ -123,7 +118,7 @@ build_openssl()
     else
         cd $SSL_DIR &&
         ./config --prefix=$SSL_INS --openssldir=$SSL_INS --libdir=lib $SSL_OPT &&
-        make -j$(nproc) >/dev/null &&
+        make -j4 >/dev/null &&
         sudo make install_sw >/dev/null &&
         sudo ldconfig || exit $?
         cd -
@@ -132,7 +127,7 @@

 build_awslc()
 {
-    LC_REV=1.55.0
+    LC_REV=1.22.0
     LC_PKG=aws-lc-$LC_REV
     LC_DIR=$DEPS_BUILD_DIR/$LC_PKG
     LC_SRC=https://github.com/aws/aws-lc/archive/refs/tags/v${LC_REV}.tar.gz
@@ -177,7 +172,7 @@ system_uses_openssl3()

 prepare_system_openssl()
 {
-    # On systems that ship OpenSSL 3 (e.g. Ubuntu 22.04+), we require debug
+    # On systems that ship OpenSSL 3 (e.g. Ubuntu 22.04), we require debug
     # symbols to whitelist leaks
     if test "$1" = "deps"; then
         echo "deb http://ddebs.ubuntu.com $(lsb_release -cs) main restricted
@@ -185,24 +180,19 @@ prepare_system_openssl()
 deb http://ddebs.ubuntu.com $(lsb_release -cs)-proposed main restricted" | \
             sudo tee -a /etc/apt/sources.list.d/ddebs.list
         sudo apt-get install -qq ubuntu-dbgsym-keyring
-        if [ "$ID" = "ubuntu" -a "$VERSION_ID" = "24.04" ]; then
+        DEPS="$DEPS libssl3-dbgsym"
-            DEPS="$DEPS libssl3t64-dbgsym"
-        else
-            DEPS="$DEPS libssl3-dbgsym"
-        fi
     fi
     if test "$LEAK_DETECTIVE" = "yes"; then
         # make sure we can properly whitelist functions with leak detective
         DEPS="$DEPS binutils-dev"
         CONFIG="$CONFIG --enable-bfd-backtraces"
-    elif [ "$ID" = "ubuntu" -a "$VERSION_ID" != "24.04" ]; then
+    else
         # with ASan we have to use the (extremely) slow stack unwind as the
         # shipped version of the library is built with -fomit-frame-pointer
         export ASAN_OPTIONS=fast_unwind_on_malloc=0
     fi
 }

-: ${SRC_DIR=$PWD}
 : ${BUILD_DIR=$PWD}
 : ${DEPS_BUILD_DIR=$BUILD_DIR/..}
 : ${DEPS_PREFIX=/usr/local}
@@ -223,13 +213,10 @@ case "$TEST" in
 default)
     # should be the default, but lets make sure
     CONFIG="--with-printf-hooks=glibc"
-    if system_uses_openssl3; then
-        prepare_system_openssl $1
-    fi
     ;;
 openssl*)
-    CONFIG="--disable-defaults --enable-pki --enable-openssl --enable-pem --enable-drbg"
+    CONFIG="--disable-defaults --enable-pki --enable-openssl --enable-pem"
-    export TESTS_PLUGINS="test-vectors openssl! pem drbg"
+    export TESTS_PLUGINS="test-vectors openssl! pem"
     DEPS="libssl-dev"
     if test "$TEST" = "openssl-3"; then
         DEPS=""
@@ -239,9 +226,6 @@ openssl*)
         use_custom_openssl $1
     elif system_uses_openssl3; then
         prepare_system_openssl $1
-    else
-        # the kdf plugin is necessary to build against older OpenSSL versions
-        TESTS_PLUGINS="$TESTS_PLUGINS kdf"
     fi
     ;;
 gcrypt)
@@ -250,16 +234,16 @@ gcrypt)
     DEPS="libgcrypt20-dev"
     ;;
 botan)
-    CONFIG="--disable-defaults --enable-pki --enable-botan --enable-pem --enable-hmac --enable-x509 --enable-constraints --enable-drbg"
+    CONFIG="--disable-defaults --enable-pki --enable-botan --enable-pem --enable-hmac --enable-x509 --enable-constraints"
-    export TESTS_PLUGINS="test-vectors botan! pem hmac x509 constraints drbg"
+    export TESTS_PLUGINS="test-vectors botan! pem hmac x509 constraints"
     DEPS=""
     if test "$1" = "build-deps"; then
         build_botan
     fi
     ;;
 wolfssl)
-    CONFIG="--disable-defaults --enable-pki --enable-wolfssl --enable-pem --enable-pkcs1 --enable-pkcs8 --enable-x509 --enable-constraints --enable-drbg"
+    CONFIG="--disable-defaults --enable-pki --enable-wolfssl --enable-pem --enable-pkcs1 --enable-pkcs8 --enable-x509 --enable-constraints"
-    export TESTS_PLUGINS="test-vectors wolfssl! pem pkcs1 pkcs8 x509 constraints drbg"
+    export TESTS_PLUGINS="test-vectors wolfssl! pem pkcs1 pkcs8 x509 constraints"
     # build with custom options to enable all the features the plugin supports
     DEPS=""
     if test "$1" = "build-deps"; then
@@ -268,11 +252,15 @@ wolfssl)
     ;;
 printf-builtin)
     CONFIG="--with-printf-hooks=builtin"
-    if system_uses_openssl3; then
-        prepare_system_openssl $1
-    fi
     ;;
-all|alpine|codeql|coverage|sonarcloud|no-dbg|no-testable-ke)
+all|codeql|coverage|sonarcloud|no-dbg)
+    if [ "$TEST" = "sonarcloud" ]; then
+        if [ -z "$SONAR_PROJECT" -o -z "$SONAR_ORGANIZATION" -o -z "$SONAR_TOKEN" ]; then
+            echo "The SONAR_PROJECT, SONAR_ORGANIZATION and SONAR_TOKEN" \
+                 "environment variables are required to run this test"
+            exit 1
+        fi
+    fi
     if [ "$TEST" = "codeql" ]; then
         # don't run tests, only analyze built code
         TARGET=
@@ -283,50 +271,33 @@ all|alpine|codeql|coverage|sonarcloud|no-dbg|no-testable-ke)
     CONFIG="--enable-all --disable-android-dns --disable-android-log
         --disable-kernel-pfroute --disable-keychain
         --disable-lock-profiler --disable-padlock --disable-fuzzing
-        --disable-osx-attr --disable-tkm
+        --disable-osx-attr --disable-tkm --disable-uci
         --disable-unwind-backtraces
         --disable-svc --disable-dbghelp-backtraces --disable-socket-win
-        --disable-kernel-wfp --disable-kernel-iph --disable-winhttp"
+        --disable-kernel-wfp --disable-kernel-iph --disable-winhttp
+        --disable-python-eggs-install"
     # not enabled on the build server
     CONFIG="$CONFIG --disable-af-alg"
+    # unable to build Botan on Ubuntu 20.04
+    if [ "$ID" = "ubuntu" -a "$VERSION_ID" = "20.04" ]; then
+        CONFIG="$CONFIG --disable-botan"
+    fi
     if test "$TEST" != "coverage"; then
         CONFIG="$CONFIG --disable-coverage"
     else
+        # not actually required but configure checks for it
         DEPS="$DEPS lcov"
-        TARGET="coverage"
     fi
-    if [ "$TEST" = "no-testable-ke" ]; then
-        CONFIG="$CONFIG --without-testable-ke"
-    fi
-    DEPS="$DEPS libcurl4-gnutls-dev libsoup-3.0-dev libunbound-dev libldns-dev
+    DEPS="$DEPS libcurl4-gnutls-dev libsoup2.4-dev libunbound-dev libldns-dev
        libmysqlclient-dev libsqlite3-dev clearsilver-dev libfcgi-dev
        libldap2-dev libpcsclite-dev libpam0g-dev binutils-dev libnm-dev
-       libgcrypt20-dev libjson-c-dev libtspi-dev libsystemd-dev
+       libgcrypt20-dev libjson-c-dev python3-pip libtspi-dev libsystemd-dev
-       libselinux1-dev libiptc-dev ruby-rubygems python3-build tox"
+       libselinux1-dev libiptc-dev"
-    if [ "$ID" = "ubuntu" -a "$VERSION_ID" = "22.04" -a "$1" = "build-deps" ]; then
+    PYDEPS="tox"
-        # python3-build is broken on 22.04 with venv (https://bugs.launchpad.net/ubuntu/+source/python-build/+bug/1992108)
-        # while installing python3-virtualenv should help, it doesn't. as even
-        # after uninstalling python3-venv, build prefers the latter
-        sudo python3 -m pip install --upgrade build
-    fi
-    if [ "$TEST" = "alpine" ]; then
-        # override the whole list for alpine
-        DEPS="git gmp-dev openldap-dev curl-dev ldns-dev unbound-dev libsoup3-dev
-            libxml2-dev tpm2-tss-dev tpm2-tss-sys mariadb-dev wolfssl-dev
-            libgcrypt-dev botan3-dev pcsc-lite-dev networkmanager-dev
-            linux-pam-dev iptables-dev libselinux-dev binutils-dev libunwind-dev
-            ruby py3-setuptools py3-build py3-tox"
-        # musl does not provide backtrace(), so use libunwind
-        CONFIG="$CONFIG --enable-unwind-backtraces"
-        # alpine doesn't have systemd
-        CONFIG="$CONFIG --disable-systemd --disable-cert-enroll-timer"
-        # no TrouSerS either
-        CONFIG="$CONFIG --disable-tss-trousers --disable-aikgen"
-        # and no Clearsilver
-        CONFIG="$CONFIG --disable-fast --disable-manager --disable-medsrv"
-    fi
     if test "$1" = "build-deps"; then
-        build_botan
+        if [ "$ID" = "ubuntu" -a "$VERSION_ID" != "20.04" ]; then
+            build_botan
+        fi
         build_wolfssl
         build_tss2
     fi
@@ -349,6 +320,13 @@ win*)
         TARGET=
     else
         CONFIG="$CONFIG --enable-openssl"
+        case "$IMG" in
+            2015|2017)
+                # old OpenSSL versions don't provide HKDF
+                CONFIG="$CONFIG --enable-kdf"
+                ;;
+        esac
+
         CFLAGS="$CFLAGS -I$OPENSSL_DIR/include"
         LDFLAGS="-L$OPENSSL_DIR/lib"
         case "$IMG" in
@@ -394,9 +372,10 @@ macos)
         --enable-socket-default --enable-sshkey --enable-stroke
         --enable-swanctl --enable-unity --enable-updown
         --enable-x509 --enable-xauth-generic"
-    DEPS="automake autoconf libtool bison gperf pkgconf openssl@1.1 curl"
+    DEPS="automake autoconf libtool bison gettext gperf pkg-config openssl@1.1 curl"
     BREW_PREFIX=$(brew --prefix)
     export PATH=$BREW_PREFIX/opt/bison/bin:$PATH
+    export ACLOCAL_PATH=$BREW_PREFIX/opt/gettext/share/aclocal:$ACLOCAL_PATH
     for pkg in openssl@1.1 curl
     do
         PKG_CONFIG_PATH=$BREW_PREFIX/opt/$pkg/lib/pkgconfig:$PKG_CONFIG_PATH
@@ -450,11 +429,7 @@ fuzzing)
     ;;
 nm)
     DEPS="gnome-common libsecret-1-dev libgtk-3-dev libnm-dev libnma-dev"
-    ORIG_SRC_DIR="$SRC_DIR"
+    cd src/frontends/gnome
-    SRC_DIR="$ORIG_SRC_DIR/src/frontends/gnome"
-    if [ "$ORIG_SRC_DIR" = "$BUILD_DIR" ]; then
-        BUILD_DIR="$SRC_DIR"
-    fi
     # don't run ./configure with ./autogen.sh
     export NOCONFIGURE=1
     ;;
@@ -476,12 +451,8 @@ case "$1" in
 deps)
     case "$OS_NAME" in
     linux)
-        sudo apt-get update -y && \
+        sudo apt-get update -qq && \
-        sudo apt-get install -y automake autoconf libtool pkgconf bison flex gperf $DEPS
+        sudo apt-get install -qq bison flex gperf gettext $DEPS
-        ;;
-    alpine)
-        apk add --no-cache build-base automake autoconf libtool pkgconfig && \
-        apk add --no-cache bison flex gperf tzdata $DEPS
         ;;
     macos)
         brew update && \
@@ -489,11 +460,15 @@ deps)
         ;;
     freebsd)
         pkg install -y automake autoconf libtool pkgconf && \
-        pkg install -y bison flex gperf $DEPS
+        pkg install -y bison flex gperf gettext $DEPS
         ;;
     esac
     exit $?
     ;;
+pydeps)
+    test -z "$PYDEPS" || pip3 -q install --user $PYDEPS
+    exit $?
+    ;;
 build-deps)
     exit
     ;;
@@ -509,28 +484,21 @@ CONFIG="$CONFIG
     --enable-leak-detective=${LEAK_DETECTIVE-no}"

 case "$TEST" in
-alpine|codeql|coverage|freebsd|fuzzing|sonarcloud|win*)
+codeql|coverage|freebsd|fuzzing|sonarcloud|win*)
     # don't use AddressSanitizer if it's not available or causes conflicts
     CONFIG="$CONFIG --disable-asan"
     ;;
 *)
     if [ "$LEAK_DETECTIVE" != "yes" ]; then
         CONFIG="$CONFIG --enable-asan"
-    else
-        CONFIG="$CONFIG --disable-asan"
     fi
     ;;
 esac

-cd $SRC_DIR
+echo "$ ./autogen.sh"
-if [ ! -f ./configure ]; then
+./autogen.sh || exit $?
-    echo "$ ./autogen.sh"
-    ./autogen.sh || exit $?
-fi

-cd $BUILD_DIR
 echo "$ CC=$CC CFLAGS=\"$CFLAGS\" ./configure $CONFIG"
-CC="$CC" CFLAGS="$CFLAGS" $SRC_DIR/configure $CONFIG || exit $?
+CC="$CC" CFLAGS="$CFLAGS" ./configure $CONFIG || exit $?

 case "$TEST" in
 apidoc)
@@ -545,10 +513,10 @@ case "$TEST" in
 sonarcloud)
     # without target, coverage is currently not supported anyway because
     # sonarqube only supports gcov, not lcov
-    build-wrapper-linux-x86-64 --out-dir $BUILD_WRAPPER_OUT_DIR make -j$(nproc) || exit $?
+    build-wrapper-linux-x86-64 --out-dir bw-output make -j4 || exit $?
     ;;
 *)
-    make -j$(nproc) $TARGET || exit $?
+    make -j4 $TARGET || exit $?
     ;;
 esac

@@ -560,9 +528,23 @@ apidoc)
     fi
     rm make.warnings
     ;;
+sonarcloud)
+    sonar-scanner \
+        -Dsonar.host.url=https://sonarcloud.io \
+        -Dsonar.projectKey=${SONAR_PROJECT} \
+        -Dsonar.organization=${SONAR_ORGANIZATION} \
+        -Dsonar.token=${SONAR_TOKEN} \
+        -Dsonar.projectVersion=$(git describe --exclude 'android-*')+${BUILD_NUMBER} \
+        -Dsonar.sources=. \
+        -Dsonar.cfamily.threads=2 \
+        -Dsonar.cfamily.analysisCache.mode=fs \
+        -Dsonar.cfamily.analysisCache.path=$HOME/.sonar-cache \
+        -Dsonar.cfamily.build-wrapper-output=bw-output || exit $?
+    rm -r bw-output .scannerwork
+    ;;
 android)
     rm -r strongswan-*
-    cd $SRC_DIR/src/frontends/android
+    cd src/frontends/android
     echo "$ ./gradlew build"
     NDK_CCACHE=ccache ./gradlew build --info || exit $?
     ;;
@@ -570,7 +552,6 @@ android)
     ;;
 esac

-cd $SRC_DIR
 # ensure there are no unignored build artifacts (or other changes) in the Git repo
 unclean="$(git status --porcelain)"
 if test -n "$unclean"; then
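For orientation, the CI driver above is invoked in stages. A hedged example of running one scenario by hand; the TEST value, the OS_NAME/LEAK_DETECTIVE variables and the stage names are taken from the case statements above, everything else (the shell session itself) is illustrative:

# illustrative only: run the openssl scenario of scripts/test.sh locally
export TEST=openssl        # selects the openssl*) branch above
export OS_NAME=linux       # used by the deps) stage
export LEAK_DETECTIVE=no   # keeps ASan enabled (see the final case statement)

./scripts/test.sh deps        # install distribution packages ($DEPS)
./scripts/test.sh pydeps      # 5.9.14rc1 side only: install $PYDEPS via pip3
./scripts/test.sh build-deps  # build Botan/wolfSSL/tpm2-tss where required
./scripts/test.sh             # configure, build and run the selected target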
@@ -1,5 +1,3 @@
-sonar.sources=.
-
 # exclude these files completely
 sonar.exclusions=\
     src/manager/templates/static/jquery.js, \
@@ -31,25 +29,16 @@ sonar.issue.ignore.allfile.a2.fileRegexp=made by GNU Bison
 sonar.issue.ignore.allfile.a3.fileRegexp=produced by gperf

 # ignore some rules
-sonar.issue.ignore.multicriteria=m1,m2,m3,m4,m5,m6,m7
+sonar.issue.ignore.multicriteria=m1,m2,m3,m4,m5,m6
-# Multiple variables should not be declared on the same line
 sonar.issue.ignore.multicriteria.m1.ruleKey=c:S1659
 sonar.issue.ignore.multicriteria.m1.resourceKey=**/*
-# Functions should not be defined with a variable number of arguments
 sonar.issue.ignore.multicriteria.m2.ruleKey=c:S923
 sonar.issue.ignore.multicriteria.m2.resourceKey=**/*
-# Function names should be used either as a call with a parameter list or with the "&" operator
 sonar.issue.ignore.multicriteria.m3.ruleKey=c:S936
 sonar.issue.ignore.multicriteria.m3.resourceKey=**/*
-# Unused function parameters should be removed
 sonar.issue.ignore.multicriteria.m4.ruleKey=c:S1172
 sonar.issue.ignore.multicriteria.m4.resourceKey=**/*
-# Single line comments should start with "--"
 sonar.issue.ignore.multicriteria.m5.ruleKey=plsql:SingleLineCommentsSyntaxCheck
 sonar.issue.ignore.multicriteria.m5.resourceKey=**/*
-# User-defined types should not be passed as variadic arguments
 sonar.issue.ignore.multicriteria.m6.ruleKey=c:S5270
 sonar.issue.ignore.multicriteria.m6.resourceKey=**/*
-# Loop variables should be declared in the minimal possible scope
-sonar.issue.ignore.multicriteria.m7.ruleKey=c:S5955
-sonar.issue.ignore.multicriteria.m7.resourceKey=**/*
@@ -28,8 +28,6 @@ cert_install_availabledir = $(sysconfdir)/cert-enroll.d/cert-install-available
 cert_install_available_DATA = \
     cert-install-ssl \
     cert-install-sssd \
-    cert-install-ldaputils \
-    cert-install-cockpit \
     cert-install-dirsrv \
     cert-install-lighttpd \
     cert-install-openxpki \
@@ -43,10 +41,9 @@ cert-install-ipsec : cert-install-ipsec.in

 EXTRA_DIST = \
     cert-enroll.conf cert-enroll.in cert-enroll.service.in cert-enroll.timer \
-    cert-install-cockpit cert-install-dirsrv cert-install-gitea \
+    cert-install-dirsrv cert-install-gitea cert-install-ipsec.in \
-    cert-install-ipsec.in cert-install-ldaputils cert-install-lighttpd \
+    cert-install-lighttpd cert-install-openxpki cert-install-ssl \
-    cert-install-openxpki cert-install-ssl cert-install-sssd \
+    cert-install-sssd cert-install-swanctl.in
-    cert-install-swanctl.in

 man8_MANS = cert-enroll.8

@@ -22,10 +22,8 @@
 : ${CAOUT=cacert}
 : ${ROOTCA=$CAOUT.pem}
 : ${OLDROOTCA=$CAOUT-old.pem}
-: ${OLDERROOTCA=$CAOUT-older.pem}
 : ${SUBCA=$CAOUT-1.pem}
 : ${OLDSUBCA=$CAOUT-1-old.pem}
-: ${OLDERSUBCA=$CAOUT-1-older.pem}
 : ${RAOUT=racert}
 : ${RACERT=$RAOUT.pem}

@@ -155,23 +155,15 @@ function check_ca_certs()
 if [ $ROOTCA_CHANGED -ne 0 ]
 then
     echo "Warning: '$ROOTCA' has changed"
-    if [ -s old/$ROOTCA ]
-    then
-        mv old/$ROOTCA older
-    fi
     mv $ROOTCA old
     mv new/$ROOTCA .
 fi

 SUBCA_CHANGED=0
-cmp -s $SUBCA new/$SUBCA || SUBCA_CHANGED=$?
+cmp -s $SUBCA new/$SUBCA || SUBCA_CHANGE=$?
 if [ $SUBCA_CHANGED -ne 0 ]
 then
     echo "Warning: '$SUBCA' has changed"
-    if [ -s old/$SUBCA ]
-    then
-        mv old/$SUBCA older
-    fi
     mv $SUBCA old
     mv new/$SUBCA .
 fi
@@ -200,7 +192,6 @@ function install_certs()
     KEYTYPE="$KEYTYPE" CERTDIR="$CERTDIR" HOSTKEY="$HOSTKEY" \
     HOSTCERT="$HOSTCERT" ROOTCA="$ROOTCA" SUBCA="$SUBCA" \
     OLDROOTCA="$OLDROOTCA" OLDSUBCA="$OLDSUBCA" \
-    OLDERROOTCA="$OLDERROOTCA" OLDERSUBCA="$OLDERSUBCA" \
     USER_GROUP="$USER_GROUP" SERVICE="$SERVICE" \
     /bin/bash $script || status=$?
     if [ $status -ne 0 ]
@@ -260,7 +251,7 @@ esac
 ##############################################################################
 # Create and change into certificates directory
 #
-mkdir -p $CERTDIR/new $CERTDIR/old $CERTDIR/older
+mkdir -p $CERTDIR/new $CERTDIR/old
 cd $CERTDIR
 echo " changed into the '$CERTDIR' directory"

@@ -1,50 +0,0 @@
#!/bin/bash
# Install the generated key and certificate as TLS credentials for the Cockpit
# management interface.
#
# Copyright (C) 2024 Andreas Steffen
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
set -e

##############################################################################
# Set local paths
#

# Path to the cockpit credentials
COCKPIT="/etc/cockpit/ws-certs.d"

##############################################################################
# Change into the certificates directory
#
cd $CERTDIR

##############################################################################
# Install the private key and certificate
#
cp $HOSTKEY $COCKPIT/ldap.key
cp $HOSTCERT $COCKPIT/ldap.crt

##############################################################################
# Restart the cockpit systemd service
#
/usr/bin/systemctl restart cockpit.service
exit 0

@@ -88,16 +88,6 @@ then
     $CERTUTIL -d . -A -t "CT,," -n "Old Sub CA" -i $CERTDIR/old/$SUBCA \
         -f passwd.txt
 fi
-if [ -s $CERTDIR/older/$ROOTCA ]
-then
-    $CERTUTIL -d . -A -t "CT,," -n "Older Root CA" -i $CERTDIR/older/$ROOTCA \
-        -f passwd.txt
-fi
-if [ -s $CERTDIR/older/$SUBCA ]
-then
-    $CERTUTIL -d . -A -t "CT,," -n "Older Sub CA" -i $CERTDIR/older/$SUBCA \
-        -f passwd.txt
-fi

 ##############################################################################
 # Move the generated credentials to the correct place and delete the build dir
@@ -53,22 +53,6 @@ cp $HOSTCERT $IPSECDIR/certs
 # Install the CA certificates
 #
 cp $ROOTCA $SUBCA $IPSECDIR/cacerts
-if [ -s old/$ROOTCA ]
-then
-    cp old/$ROOTCA $IPSECDIR/cacerts/$OLDROOTCA
-fi
-if [ -s old/$SUBCA ]
-then
-    cp old/$SUBCA $IPSECDIR/cacerts/$OLDSUBCA
-fi
-if [ -s older/$ROOTCA ]
-then
-    cp older/$ROOTCA $IPSECDIR/cacerts/$OLDERROOTCA
-fi
-if [ -s older/$SUBCA ]
-then
-    cp older/$SUBCA $IPSECDIR/cacerts/$OLDERSUBCA
-fi

 ##############################################################################
 # Reload the strongSwan charon daemon if it is running
@@ -1,64 +0,0 @@
#!/bin/bash
# Concatenate the present and past CA certificates into a single TLS_CACERT
# file defined by ldap.conf so that the ldap-utils can verify the LDAP server
# certificate.
#
# Copyright (C) 2024 Andreas Steffen
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
set -e

##############################################################################
# Set some local paths
#

# Path to the LDAP configuration file
LDAP_CONF="/etc/ldap/ldap.conf"

# Extract or set path to the LDAP TLS CA cert directory
LDAP_TLS_CACERTS=$(awk '/TLS_CACERT/ {print $2}' $LDAP_CONF)

##############################################################################
# Change into the certificate directory
#
cd $CERTDIR

##############################################################################
# Concatenate the CA certificates into a single file
#
cat $ROOTCA $SUBCA > $LDAP_TLS_CACERTS
if [ -s old/$ROOTCA ]
then
    cat old/$ROOTCA >> $LDAP_TLS_CACERTS
fi
if [ -s old/$SUBCA ]
then
    cat old/$SUBCA >> $LDAP_TLS_CACERTS
fi
if [ -s older/$ROOTCA ]
then
    cat older/$ROOTCA >> $LDAP_TLS_CACERTS
fi
if [ -s older/$SUBCA ]
then
    cat older/$SUBCA >> $LDAP_TLS_CACERTS
fi

exit 0

@@ -54,14 +54,6 @@ if [ -s old/$SUBCA ]
 then
     cp old/$SUBCA $OPENXPKI_TLS/chain/$OLDSUBCA
 fi
-if [ -s older/$ROOTCA ]
-then
-    cp older/$ROOTCA $OPENXPKI_TLS/chain/$OLDERROOTCA
-fi
-if [ -s older/$SUBCA ]
-then
-    cp older/$SUBCA $OPENXPKI_TLS/chain/$OLDERSUBCA
-fi

 rm -f $OPENXPKI_TLS/chain/*.0

@@ -49,14 +49,6 @@ if [ -s $CERTDIR/old/$SUBCA ]
 then
     cat $CERTDIR/old/$SUBCA >> $SSL_DIR/trusted.pem
 fi
-if [ -s $CERTDIR/older/$ROOTCA ]
-then
-    cat $CERTDIR/older/$ROOTCA >> $SSL_DIR/trusted.pem
-fi
-if [ -s $CERTDIR/older/$SUBCA ]
-then
-    cat $CERTDIR/older/$SUBCA >> $SSL_DIR/trusted.pem
-fi

 ##############################################################################
 # Restart the systemd service if it is active
@@ -53,14 +53,6 @@ if [ -s old/$SUBCA ]
 then
     cp old/$SUBCA $LDAP_TLS_CACERTDIR/$OLDSUBCA
 fi
-if [ -s older/$ROOTCA ]
-then
-    cp older/$ROOTCA $LDAP_TLS_CACERTDIR/$OLDERROOTCA
-fi
-if [ -s older/$SUBCA ]
-then
-    cp older/$SUBCA $LDAP_TLS_CACERTDIR/$OLDERSUBCA
-fi

 rm -f $LDAP_TLS_CACERTDIR/*.0

@@ -69,14 +69,6 @@ if [ -s old/$SUBCA ]
 then
     cp old/$SUBCA ${SWANCTLDIR}/x509ca/$OLDSUBCA
 fi
-if [ -s older/$ROOTCA ]
-then
-    cp older/$ROOTCA ${SWANCTLDIR}/x509ca/$OLDERROOTCA
-fi
-if [ -s older/$SUBCA ]
-then
-    cp older/$SUBCA ${SWANCTLDIR}/x509ca/$OLDERSUBCA
-fi

 ##############################################################################
 # Reload the strongswan systemd service if it is running
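The cert-enroll hunks above all revolve around one difference: the master side keeps three generations of CA certificates (current, old/, older/), while 5.9.14rc1 keeps only two. A condensed sketch of the rotation step performed for the root CA on the master side, reusing the script's own variables (this is illustrative, not the complete check_ca_certs function):

# rotation once a differing certificate has been fetched into new/:
# current -> old/, old/ -> older/, new/ -> current
if [ -s old/$ROOTCA ]
then
    mv old/$ROOTCA older
fi
mv $ROOTCA old
mv new/$ROOTCA .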
@@ -170,7 +170,7 @@ static peer_cfg_t* create_peer_cfg(private_cmd_connection_t *this)
         case PROF_V1_XAUTH_AM:
         case PROF_V1_XAUTH_PSK_AM:
         case PROF_V1_HYBRID_AM:
-            peer.options |= OPT_IKEV1_AGGRESSIVE;
+            peer.aggressive = TRUE;
             /* FALL */
         case PROF_V1_PUB:
         case PROF_V1_XAUTH:
@@ -585,7 +585,7 @@ cmd_connection_t *cmd_connection_create()
     lib->processor->queue_job(lib->processor,
             (job_t*)callback_job_create_with_prio(
                 (callback_job_cb_t)initiate, this, NULL,
-                callback_job_cancel_thread, JOB_PRIO_CRITICAL));
+                (callback_job_cancel_t)return_false, JOB_PRIO_CRITICAL));

     return &this->public;
 }
@ -205,29 +205,11 @@ int main(int argc, char *argv[])
 	/* install routes via XFRM interfaces, if we can use them */
 	lib->settings->set_default_str(lib->settings,
 			"charon-nm.plugins.kernel-netlink.install_routes_xfrmi", "yes");
-	/* use a separate routing table to avoid conflicts with regular charon */
+	/* bypass IKE traffic from these routes in case traffic selectors conflict */
 	lib->settings->set_default_str(lib->settings,
-			"charon-nm.routing_table", "210");
-	/* use the same value as priority (higher than charon's default) */
+			"charon-nm.plugins.socket-default.fwmark", "220");
 	lib->settings->set_default_str(lib->settings,
-			"charon-nm.routing_table_prio", "210");
-	/* bypass IKE/ESP from these routes in case traffic selectors conflict */
-	lib->settings->set_default_str(lib->settings,
-			"charon-nm.plugins.socket-default.fwmark", "210");
-	lib->settings->set_default_str(lib->settings,
-			"charon-nm.plugins.kernel-netlink.fwmark", "!210");
-
-	/* trigger a DPD to verify the current path is working */
-	lib->settings->set_default_str(lib->settings,
-			"charon-nm.check_current_path", "yes");
-
-	/* fail more quickly so users don't have to wait too long for a new SA */
-	lib->settings->set_default_str(lib->settings,
-			"charon-nm.retransmit_tries", "3");
-	lib->settings->set_default_str(lib->settings,
-			"charon-nm.retransmit_timeout", "2.0");
-	lib->settings->set_default_str(lib->settings,
-			"charon-nm.retransmit_base", "1.4");
+			"charon-nm.plugins.kernel-netlink.fwmark", "!220");
 
 	DBG1(DBG_DMN, "Starting charon NetworkManager backend (strongSwan "VERSION")");
 	if (lib->integrity)
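For reference, the fwmark default set here is read back elsewhere in this compare through the settings API and parsed into a mark_t. A rough sketch of that read-back, assuming <daemon.h> makes the settings and mark helpers available; the function name is hypothetical:

```c
#include <daemon.h>

/* Illustrative only: fetch the charon-nm socket fwmark default set above and
 * parse it into a mark_t (value/mask), mirroring the pattern used further
 * down in this compare for child.set_mark_out. */
static bool read_nm_fwmark(mark_t *mark)
{
	char *value;

	value = lib->settings->get_str(lib->settings,
				"charon-nm.plugins.socket-default.fwmark", NULL);
	if (!value)
	{
		return FALSE;
	}
	return mark_from_string(value, MARK_OP_NONE, mark);
}
```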
@ -78,8 +78,7 @@ static job_requeue_t run(nm_backend_t *this)
 /**
  * Cancel the GLib Main Event Loop
  */
-CALLBACK(cancel, bool,
-	nm_backend_t *this)
+static bool cancel(nm_backend_t *this)
 {
 	if (this->loop)
 	{
@ -153,7 +152,7 @@ static bool nm_backend_init()
 
 	lib->processor->queue_job(lib->processor,
 		(job_t*)callback_job_create_with_prio((callback_job_cb_t)run, this,
-									NULL, cancel, JOB_PRIO_CRITICAL));
+									NULL, (callback_job_cancel_t)cancel, JOB_PRIO_CRITICAL));
 	return TRUE;
 }
 
@ -195,7 +195,7 @@ nm_handler_t *nm_handler_create()
 		.public = {
 			.handler = {
 				.handle = _handle,
-				.release = (void*)nop,
+				.release = nop,
 				.create_attribute_enumerator = _create_attribute_enumerator,
 			},
 			.create_enumerator = _create_enumerator,
@ -214,10 +214,6 @@ static void signal_ip_config(NMVpnServicePlugin *plugin,
 
 	handler = priv->handler;
 
-	/* we can reconnect automatically if interfaces change */
-	g_variant_builder_add (&builder, "{sv}", NM_VPN_PLUGIN_CAN_PERSIST,
-						   g_variant_new_boolean (TRUE));
-
 	/* NM apparently requires to know the gateway (it uses it to install a
 	 * direct route via physical interface if conflicting routes are passed) */
 	other = ike_sa->get_other_host(ike_sa);
|
|||||||
return TRUE;
|
return TRUE;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Add traffic selectors to the given config, optionally parse them from a
|
|
||||||
* semicolon-separated list.
|
|
||||||
*/
|
|
||||||
static bool add_traffic_selectors(child_cfg_t *child_cfg, bool local,
|
|
||||||
const char *list, GError **err)
|
|
||||||
{
|
|
||||||
enumerator_t *enumerator;
|
|
||||||
traffic_selector_t *ts;
|
|
||||||
char *token;
|
|
||||||
|
|
||||||
if (list && strlen(list))
|
|
||||||
{
|
|
||||||
enumerator = enumerator_create_token(list, ";", "");
|
|
||||||
while (enumerator->enumerate(enumerator, &token))
|
|
||||||
{
|
|
||||||
ts = traffic_selector_create_from_cidr(token, 0, 0, 65535);
|
|
||||||
if (!ts)
|
|
||||||
{
|
|
||||||
g_set_error(err, NM_VPN_PLUGIN_ERROR,
|
|
||||||
NM_VPN_PLUGIN_ERROR_LAUNCH_FAILED,
|
|
||||||
"Invalid %s traffic selector '%s'.",
|
|
||||||
local ? "local" : "remote", token);
|
|
||||||
enumerator->destroy(enumerator);
|
|
||||||
return FALSE;
|
|
||||||
}
|
|
||||||
child_cfg->add_traffic_selector(child_cfg, local, ts);
|
|
||||||
}
|
|
||||||
enumerator->destroy(enumerator);
|
|
||||||
}
|
|
||||||
else if (local)
|
|
||||||
{
|
|
||||||
ts = traffic_selector_create_dynamic(0, 0, 65535);
|
|
||||||
child_cfg->add_traffic_selector(child_cfg, TRUE, ts);
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
ts = traffic_selector_create_from_cidr("0.0.0.0/0", 0, 0, 65535);
|
|
||||||
child_cfg->add_traffic_selector(child_cfg, FALSE, ts);
|
|
||||||
ts = traffic_selector_create_from_cidr("::/0", 0, 0, 65535);
|
|
||||||
child_cfg->add_traffic_selector(child_cfg, FALSE, ts);
|
|
||||||
}
|
|
||||||
return TRUE;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Connect function called from NM via DBUS
|
* Connect function called from NM via DBUS
|
||||||
*/
|
*/
|
||||||
@ -741,6 +692,7 @@ static gboolean connect_(NMVpnServicePlugin *plugin, NMConnection *connection,
|
|||||||
ike_cfg_t *ike_cfg;
|
ike_cfg_t *ike_cfg;
|
||||||
peer_cfg_t *peer_cfg;
|
peer_cfg_t *peer_cfg;
|
||||||
child_cfg_t *child_cfg;
|
child_cfg_t *child_cfg;
|
||||||
|
traffic_selector_t *ts;
|
||||||
ike_sa_t *ike_sa;
|
ike_sa_t *ike_sa;
|
||||||
auth_cfg_t *auth;
|
auth_cfg_t *auth;
|
||||||
certificate_t *cert = NULL;
|
certificate_t *cert = NULL;
|
||||||
@ -960,9 +912,10 @@ static gboolean connect_(NMVpnServicePlugin *plugin, NMConnection *connection,
|
|||||||
if (priv->xfrmi_id)
|
if (priv->xfrmi_id)
|
||||||
{ /* set the same mark as for IKE packets on the ESP packets so no routing
|
{ /* set the same mark as for IKE packets on the ESP packets so no routing
|
||||||
* loop is created if the TS covers the VPN server's IP */
|
* loop is created if the TS covers the VPN server's IP */
|
||||||
mark_from_string(lib->settings->get_str(lib->settings,
|
child.set_mark_out = (mark_t){
|
||||||
"charon-nm.plugins.socket-default.fwmark", NULL),
|
.value = 220,
|
||||||
MARK_OP_NONE, &child.set_mark_out);
|
.mask = 0xffffffff,
|
||||||
|
};
|
||||||
child.if_id_in = child.if_id_out = priv->xfrmi_id;
|
child.if_id_in = child.if_id_out = priv->xfrmi_id;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -993,22 +946,36 @@ static gboolean connect_(NMVpnServicePlugin *plugin, NMConnection *connection,
|
|||||||
child_cfg->add_proposal(child_cfg, proposal_create_default_aead(PROTO_ESP));
|
child_cfg->add_proposal(child_cfg, proposal_create_default_aead(PROTO_ESP));
|
||||||
child_cfg->add_proposal(child_cfg, proposal_create_default(PROTO_ESP));
|
child_cfg->add_proposal(child_cfg, proposal_create_default(PROTO_ESP));
|
||||||
}
|
}
|
||||||
|
ts = traffic_selector_create_dynamic(0, 0, 65535);
|
||||||
str = nm_setting_vpn_get_data_item(vpn, "local-ts");
|
child_cfg->add_traffic_selector(child_cfg, TRUE, ts);
|
||||||
if (!add_traffic_selectors(child_cfg, TRUE, str, err))
|
|
||||||
{
|
|
||||||
child_cfg->destroy(child_cfg);
|
|
||||||
peer_cfg->destroy(peer_cfg);
|
|
||||||
return FALSE;
|
|
||||||
}
|
|
||||||
str = nm_setting_vpn_get_data_item(vpn, "remote-ts");
|
str = nm_setting_vpn_get_data_item(vpn, "remote-ts");
|
||||||
if (!add_traffic_selectors(child_cfg, FALSE, str, err))
|
if (str && strlen(str))
|
||||||
{
|
{
|
||||||
child_cfg->destroy(child_cfg);
|
enumerator = enumerator_create_token(str, ";", "");
|
||||||
peer_cfg->destroy(peer_cfg);
|
while (enumerator->enumerate(enumerator, &str))
|
||||||
return FALSE;
|
{
|
||||||
|
ts = traffic_selector_create_from_cidr((char*)str, 0, 0, 65535);
|
||||||
|
if (!ts)
|
||||||
|
{
|
||||||
|
g_set_error(err, NM_VPN_PLUGIN_ERROR,
|
||||||
|
NM_VPN_PLUGIN_ERROR_LAUNCH_FAILED,
|
||||||
|
"Invalid remote traffic selector.");
|
||||||
|
enumerator->destroy(enumerator);
|
||||||
|
child_cfg->destroy(child_cfg);
|
||||||
|
peer_cfg->destroy(peer_cfg);
|
||||||
|
return FALSE;
|
||||||
|
}
|
||||||
|
child_cfg->add_traffic_selector(child_cfg, FALSE, ts);
|
||||||
|
}
|
||||||
|
enumerator->destroy(enumerator);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
ts = traffic_selector_create_from_cidr("0.0.0.0/0", 0, 0, 65535);
|
||||||
|
child_cfg->add_traffic_selector(child_cfg, FALSE, ts);
|
||||||
|
ts = traffic_selector_create_from_cidr("::/0", 0, 0, 65535);
|
||||||
|
child_cfg->add_traffic_selector(child_cfg, FALSE, ts);
|
||||||
}
|
}
|
||||||
|
|
||||||
peer_cfg->add_child_cfg(peer_cfg, child_cfg);
|
peer_cfg->add_child_cfg(peer_cfg, child_cfg);
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
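The proposal setup at the top of this hunk relies on the built-in defaults. As a side note, an explicit proposal can be configured the same way the TKM tests later in this compare do; a short sketch with an example algorithm string:

```c
#include <daemon.h>

/* Sketch: add an explicit ESP proposal instead of (or in addition to) the
 * defaults added above. The algorithm string is only an example. */
static void add_explicit_esp_proposal(child_cfg_t *child_cfg)
{
	proposal_t *proposal;

	proposal = proposal_create_from_string(PROTO_ESP, "aes256gcm16-modp3072");
	if (proposal)
	{	/* child_cfg takes ownership of the proposal */
		child_cfg->add_proposal(child_cfg, proposal);
	}
}
```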
@ -37,7 +37,7 @@
 
 #include "tkm.h"
 #include "tkm_nonceg.h"
-#include "tkm_key_exchange.h"
+#include "tkm_diffie_hellman.h"
 #include "tkm_keymat.h"
 #include "tkm_listener.h"
 #include "tkm_kernel_ipsec.h"
@ -318,9 +318,9 @@ int main(int argc, char *argv[])
 	lib->plugins->add_static_features(lib->plugins, "tkm-backend", features,
 									  countof(features), TRUE, NULL, NULL);
 
-	if (!register_ke_mapping())
+	if (!register_dh_mapping())
 	{
-		DBG1(DBG_DMN, "no KE group mapping defined - aborting %s", dmn_name);
+		DBG1(DBG_DMN, "no DH group mapping defined - aborting %s", dmn_name);
 		goto deinit;
 	}
 
@ -410,7 +410,7 @@ int main(int argc, char *argv[])
 	lib->encoding->remove_encoder(lib->encoding, tkm_encoder_encode);
 
 deinit:
-	destroy_ke_mapping();
+	destroy_dh_mapping();
 	destroy_ca_mapping();
 	libcharon_deinit();
 	tkm_deinit();
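register_ke_mapping() (register_dh_mapping() in 5.9.14rc1) loads an IANA-to-TKM algorithm mapping from the settings backend. A sketch of that enumeration, assuming the charon-tkm namespace; the dump function is illustrative only, the real code stores the pairs in a hashtable and registers plugin features:

```c
#include <daemon.h>

/* Sketch: walk the "<ns>.ke_mapping" key/value entries (IANA id -> TKM id)
 * the same way register_ke_mapping() does further down in this compare. */
static void dump_ke_mapping()
{
	enumerator_t *enumerator;
	char *iana_id_str, *tkm_id_str;

	enumerator = lib->settings->create_key_value_enumerator(lib->settings,
												"%s.ke_mapping", lib->ns);
	while (enumerator->enumerate(enumerator, &iana_id_str, &tkm_id_str))
	{
		DBG1(DBG_DMN, "KE mapping: IKE id %s -> TKM id %s",
			 iana_id_str, tkm_id_str);
	}
	enumerator->destroy(enumerator);
}
```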
@ -25,9 +25,7 @@
 
 void charon_esa_acquire(result_type *res, const sp_id_type sp_id)
 {
-	kernel_acquire_data_t data = {
-		.cpu = CPU_ID_MAX,
-	};
+	kernel_acquire_data_t data = {};
 
 	DBG1(DBG_KNL, "ees: acquire received for reqid %u", sp_id);
 	charon->kernel->acquire(charon->kernel, sp_id, &data);
@ -83,10 +83,9 @@ bool tkm_init()
 	}
 
 	/* get limits from tkm */
-	if (ike_tkm_limits(&max_requests, &limits[TKM_CTX_NONCE], &limits[TKM_CTX_KE],
+	if (ike_tkm_limits(&max_requests, &limits[TKM_CTX_NONCE], &limits[TKM_CTX_DH],
 					   &limits[TKM_CTX_CC], &limits[TKM_CTX_AE],
-					   &limits[TKM_CTX_ISA], &limits[TKM_CTX_ESA],
-					   &limits[TKM_CTX_BLOB]) != TKM_OK)
+					   &limits[TKM_CTX_ISA], &limits[TKM_CTX_ESA]) != TKM_OK)
 	{
 		ees_server_finalize();
 		tkmlib_final();
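A small aside on the charon_esa_acquire() hunk above: members not named in a designated initializer are zero-initialized, so the two sides differ only in the initial cpu value. A generic illustration with a stand-in struct; UINT32_MAX is assumed here as the CPU_ID_MAX sentinel and the member layout is abbreviated:

```c
#include <stdint.h>

/* Stand-in for kernel_acquire_data_t; only .cpu matters for this comparison. */
struct acquire_sketch {
	uintptr_t src, dst;
	uint32_t cpu;
};

/* master: all members zeroed except cpu, which gets the "no CPU" sentinel */
static struct acquire_sketch with_sentinel = { .cpu = UINT32_MAX };
/* 5.9.14rc1: everything zero-initialized, including cpu */
static struct acquire_sketch all_zero = {0};
```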
@ -20,110 +20,92 @@
|
|||||||
|
|
||||||
#include "tkm.h"
|
#include "tkm.h"
|
||||||
#include "tkm_utils.h"
|
#include "tkm_utils.h"
|
||||||
#include "tkm_key_exchange.h"
|
#include "tkm_diffie_hellman.h"
|
||||||
|
|
||||||
#include <daemon.h>
|
#include <daemon.h>
|
||||||
#include <collections/hashtable.h>
|
#include <collections/hashtable.h>
|
||||||
|
|
||||||
typedef struct private_tkm_key_exchange_t private_tkm_key_exchange_t;
|
typedef struct private_tkm_diffie_hellman_t private_tkm_diffie_hellman_t;
|
||||||
|
|
||||||
static hashtable_t *method_map = NULL;
|
static hashtable_t *group_map = NULL;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Private data of a tkm_key_exchange_t object.
|
* Private data of a tkm_diffie_hellman_t object.
|
||||||
*/
|
*/
|
||||||
struct private_tkm_key_exchange_t {
|
struct private_tkm_diffie_hellman_t {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Public tkm_key_exchange_t interface.
|
* Public tkm_diffie_hellman_t interface.
|
||||||
*/
|
*/
|
||||||
tkm_key_exchange_t public;
|
tkm_diffie_hellman_t public;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Key exchange method identifier.
|
* Diffie-Hellman group number.
|
||||||
*/
|
*/
|
||||||
key_exchange_method_t method;
|
key_exchange_method_t group;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Key exchange algorithm ID corresponding to method.
|
* Diffie-Hellman public value.
|
||||||
*/
|
*/
|
||||||
uint64_t kea_id;
|
dh_pubvalue_type pubvalue;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Context id.
|
* Context id.
|
||||||
*/
|
*/
|
||||||
ke_id_type context_id;
|
dh_id_type context_id;
|
||||||
|
|
||||||
};
|
};
|
||||||
|
|
||||||
METHOD(key_exchange_t, get_public_key, bool,
|
METHOD(key_exchange_t, get_public_key, bool,
|
||||||
private_tkm_key_exchange_t *this, chunk_t *value)
|
private_tkm_diffie_hellman_t *this, chunk_t *value)
|
||||||
{
|
{
|
||||||
blob_id_type pubvalue_id;
|
sequence_to_chunk(this->pubvalue.data, this->pubvalue.size, value);
|
||||||
blob_length_type pubvalue_length;
|
return TRUE;
|
||||||
bool ret = FALSE;
|
|
||||||
|
|
||||||
pubvalue_id = tkm->idmgr->acquire_id(tkm->idmgr, TKM_CTX_BLOB);
|
|
||||||
if (pubvalue_id)
|
|
||||||
{
|
|
||||||
ret = ike_ke_get(this->context_id, this->kea_id, pubvalue_id,
|
|
||||||
&pubvalue_length) == TKM_OK &&
|
|
||||||
blob_to_chunk(pubvalue_id, pubvalue_length, value);
|
|
||||||
|
|
||||||
tkm->idmgr->release_id(tkm->idmgr, TKM_CTX_BLOB, pubvalue_id);
|
|
||||||
}
|
|
||||||
return ret;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
METHOD(key_exchange_t, get_shared_secret, bool,
|
METHOD(key_exchange_t, get_shared_secret, bool,
|
||||||
private_tkm_key_exchange_t *this, chunk_t *secret)
|
private_tkm_diffie_hellman_t *this, chunk_t *secret)
|
||||||
{
|
{
|
||||||
*secret = chunk_empty;
|
*secret = chunk_empty;
|
||||||
return TRUE;
|
return TRUE;
|
||||||
}
|
}
|
||||||
|
|
||||||
METHOD(key_exchange_t, set_public_key, bool,
|
METHOD(key_exchange_t, set_public_key, bool,
|
||||||
private_tkm_key_exchange_t *this, chunk_t value)
|
private_tkm_diffie_hellman_t *this, chunk_t value)
|
||||||
{
|
{
|
||||||
blob_id_type pubvalue_id;
|
dh_pubvalue_type othervalue;
|
||||||
bool ret = FALSE;
|
|
||||||
|
|
||||||
if (!key_exchange_verify_pubkey(this->method, value))
|
if (!key_exchange_verify_pubkey(this->group, value) ||
|
||||||
|
value.len > sizeof(othervalue.data))
|
||||||
{
|
{
|
||||||
return FALSE;
|
return FALSE;
|
||||||
}
|
}
|
||||||
|
othervalue.size = value.len;
|
||||||
|
memcpy(&othervalue.data, value.ptr, value.len);
|
||||||
|
|
||||||
pubvalue_id = tkm->idmgr->acquire_id(tkm->idmgr, TKM_CTX_BLOB);
|
return ike_dh_generate_key(this->context_id, othervalue) == TKM_OK;
|
||||||
if (pubvalue_id)
|
|
||||||
{
|
|
||||||
ret = chunk_to_blob(pubvalue_id, &value) &&
|
|
||||||
ike_ke_set(this->context_id, this->kea_id, pubvalue_id) == TKM_OK;
|
|
||||||
|
|
||||||
tkm->idmgr->release_id(tkm->idmgr, TKM_CTX_BLOB, pubvalue_id);
|
|
||||||
}
|
|
||||||
return ret;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
METHOD(key_exchange_t, get_method, key_exchange_method_t,
|
METHOD(key_exchange_t, get_method, key_exchange_method_t,
|
||||||
private_tkm_key_exchange_t *this)
|
private_tkm_diffie_hellman_t *this)
|
||||||
{
|
{
|
||||||
return this->method;
|
return this->group;
|
||||||
}
|
}
|
||||||
|
|
||||||
METHOD(key_exchange_t, destroy, void,
|
METHOD(key_exchange_t, destroy, void,
|
||||||
private_tkm_key_exchange_t *this)
|
private_tkm_diffie_hellman_t *this)
|
||||||
{
|
{
|
||||||
if (ike_ke_reset(this->context_id) != TKM_OK)
|
if (ike_dh_reset(this->context_id) != TKM_OK)
|
||||||
{
|
{
|
||||||
DBG1(DBG_LIB, "failed to reset KE context %d", this->context_id);
|
DBG1(DBG_LIB, "failed to reset DH context %d", this->context_id);
|
||||||
}
|
}
|
||||||
|
|
||||||
tkm->idmgr->release_id(tkm->idmgr, TKM_CTX_KE, this->context_id);
|
tkm->idmgr->release_id(tkm->idmgr, TKM_CTX_DH, this->context_id);
|
||||||
free(this);
|
free(this);
|
||||||
}
|
}
|
||||||
|
|
||||||
METHOD(tkm_key_exchange_t, get_id, ke_id_type,
|
METHOD(tkm_diffie_hellman_t, get_id, dh_id_type,
|
||||||
private_tkm_key_exchange_t *this)
|
private_tkm_diffie_hellman_t *this)
|
||||||
{
|
{
|
||||||
return this->context_id;
|
return this->context_id;
|
||||||
}
|
}
|
||||||
@ -142,7 +124,7 @@ static bool equals(void *key, void *other_key)
|
|||||||
/*
|
/*
|
||||||
* Described in header.
|
* Described in header.
|
||||||
*/
|
*/
|
||||||
int register_ke_mapping()
|
int register_dh_mapping()
|
||||||
{
|
{
|
||||||
int count, i;
|
int count, i;
|
||||||
char *iana_id_str, *tkm_id_str;
|
char *iana_id_str, *tkm_id_str;
|
||||||
@ -155,7 +137,7 @@ int register_ke_mapping()
|
|||||||
(hashtable_equals_t)equals, 16);
|
(hashtable_equals_t)equals, 16);
|
||||||
|
|
||||||
enumerator = lib->settings->create_key_value_enumerator(lib->settings,
|
enumerator = lib->settings->create_key_value_enumerator(lib->settings,
|
||||||
"%s.ke_mapping",
|
"%s.dh_mapping",
|
||||||
lib->ns);
|
lib->ns);
|
||||||
|
|
||||||
while (enumerator->enumerate(enumerator, &iana_id_str, &tkm_id_str))
|
while (enumerator->enumerate(enumerator, &iana_id_str, &tkm_id_str))
|
||||||
@ -171,7 +153,7 @@ int register_ke_mapping()
|
|||||||
|
|
||||||
count = map->get_count(map);
|
count = map->get_count(map);
|
||||||
plugin_feature_t f[count + 1];
|
plugin_feature_t f[count + 1];
|
||||||
f[0] = PLUGIN_REGISTER(KE, tkm_key_exchange_create);
|
f[0] = PLUGIN_REGISTER(KE, tkm_diffie_hellman_create);
|
||||||
|
|
||||||
i = 1;
|
i = 1;
|
||||||
enumerator = map->create_enumerator(map);
|
enumerator = map->create_enumerator(map);
|
||||||
@ -182,12 +164,12 @@ int register_ke_mapping()
|
|||||||
}
|
}
|
||||||
enumerator->destroy(enumerator);
|
enumerator->destroy(enumerator);
|
||||||
|
|
||||||
lib->plugins->add_static_features(lib->plugins, "tkm-ke", f, countof(f),
|
lib->plugins->add_static_features(lib->plugins, "tkm-dh", f, countof(f),
|
||||||
TRUE, NULL, NULL);
|
TRUE, NULL, NULL);
|
||||||
|
|
||||||
if (count > 0)
|
if (count > 0)
|
||||||
{
|
{
|
||||||
method_map = map;
|
group_map = map;
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
@ -200,33 +182,32 @@ int register_ke_mapping()
|
|||||||
/*
|
/*
|
||||||
* Described in header.
|
* Described in header.
|
||||||
*/
|
*/
|
||||||
void destroy_ke_mapping()
|
void destroy_dh_mapping()
|
||||||
{
|
{
|
||||||
enumerator_t *enumerator;
|
enumerator_t *enumerator;
|
||||||
char *key, *value;
|
char *key, *value;
|
||||||
|
|
||||||
if (method_map)
|
if (group_map)
|
||||||
{
|
{
|
||||||
enumerator = method_map->create_enumerator(method_map);
|
enumerator = group_map->create_enumerator(group_map);
|
||||||
while (enumerator->enumerate(enumerator, &key, &value))
|
while (enumerator->enumerate(enumerator, &key, &value))
|
||||||
{
|
{
|
||||||
free(key);
|
free(key);
|
||||||
free(value);
|
free(value);
|
||||||
}
|
}
|
||||||
enumerator->destroy(enumerator);
|
enumerator->destroy(enumerator);
|
||||||
method_map->destroy(method_map);
|
group_map->destroy(group_map);
|
||||||
method_map = NULL;
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
/*
|
||||||
* Described in header.
|
* Described in header.
|
||||||
*/
|
*/
|
||||||
tkm_key_exchange_t *tkm_key_exchange_create(key_exchange_method_t method)
|
tkm_diffie_hellman_t *tkm_diffie_hellman_create(key_exchange_method_t group)
|
||||||
{
|
{
|
||||||
private_tkm_key_exchange_t *this;
|
private_tkm_diffie_hellman_t *this;
|
||||||
|
|
||||||
if (!method_map)
|
if (!group_map)
|
||||||
{
|
{
|
||||||
return NULL;
|
return NULL;
|
||||||
}
|
}
|
||||||
@ -242,8 +223,8 @@ tkm_key_exchange_t *tkm_key_exchange_create(key_exchange_method_t method)
|
|||||||
},
|
},
|
||||||
.get_id = _get_id,
|
.get_id = _get_id,
|
||||||
},
|
},
|
||||||
.method = method,
|
.group = group,
|
||||||
.context_id = tkm->idmgr->acquire_id(tkm->idmgr, TKM_CTX_KE),
|
.context_id = tkm->idmgr->acquire_id(tkm->idmgr, TKM_CTX_DH),
|
||||||
);
|
);
|
||||||
|
|
||||||
if (!this->context_id)
|
if (!this->context_id)
|
||||||
@ -252,14 +233,18 @@ tkm_key_exchange_t *tkm_key_exchange_create(key_exchange_method_t method)
|
|||||||
return NULL;
|
return NULL;
|
||||||
}
|
}
|
||||||
|
|
||||||
uint64_t *kea_id_ptr = method_map->get(method_map, &method);
|
uint64_t *dha_id = group_map->get(group_map, &group);
|
||||||
if (!kea_id_ptr)
|
if (!dha_id)
|
||||||
{
|
{
|
||||||
free(this);
|
free(this);
|
||||||
return NULL;
|
return NULL;
|
||||||
}
|
}
|
||||||
|
|
||||||
this->kea_id = *kea_id_ptr;
|
if (ike_dh_create(this->context_id, *dha_id, &this->pubvalue) != TKM_OK)
|
||||||
|
{
|
||||||
|
free(this);
|
||||||
|
return NULL;
|
||||||
|
}
|
||||||
|
|
||||||
return &this->public;
|
return &this->public;
|
||||||
}
|
}
|
@ -16,14 +16,14 @@
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @defgroup tkm-ke key exchange
|
* @defgroup tkm-dh diffie hellman
|
||||||
* @{ @ingroup tkm
|
* @{ @ingroup tkm
|
||||||
*/
|
*/
|
||||||
|
|
||||||
#ifndef TKM_KEY_EXCHANGE_H_
|
#ifndef TKM_DIFFIE_HELLMAN_H_
|
||||||
#define TKM_KEY_EXCHANGE_H_
|
#define TKM_DIFFIE_HELLMAN_H_
|
||||||
|
|
||||||
typedef struct tkm_key_exchange_t tkm_key_exchange_t;
|
typedef struct tkm_diffie_hellman_t tkm_diffie_hellman_t;
|
||||||
|
|
||||||
#include <library.h>
|
#include <library.h>
|
||||||
#include <tkm/types.h>
|
#include <tkm/types.h>
|
||||||
@ -31,7 +31,7 @@ typedef struct tkm_key_exchange_t tkm_key_exchange_t;
|
|||||||
/**
|
/**
|
||||||
* key_exchange_t implementation using the trusted key manager.
|
* key_exchange_t implementation using the trusted key manager.
|
||||||
*/
|
*/
|
||||||
struct tkm_key_exchange_t {
|
struct tkm_diffie_hellman_t {
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Implements key_exchange_t interface.
|
* Implements key_exchange_t interface.
|
||||||
@ -39,33 +39,33 @@ struct tkm_key_exchange_t {
|
|||||||
key_exchange_t ke;
|
key_exchange_t ke;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Get Key Exchange context id.
|
* Get Diffie-Hellman context id.
|
||||||
*
|
*
|
||||||
* @return id of this KE context.
|
* @return id of this DH context.
|
||||||
*/
|
*/
|
||||||
ke_id_type (*get_id)(tkm_key_exchange_t * const this);
|
dh_id_type (*get_id)(tkm_diffie_hellman_t * const this);
|
||||||
|
|
||||||
};
|
};
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Loads IANA KE method identifier to TKM id mapping from config and registers
|
* Loads IANA DH group identifier to TKM id mapping from config and registers
|
||||||
* the corresponding KE plugin features.
|
* the corresponding DH features.
|
||||||
*
|
*
|
||||||
* @return number of registered mappings
|
* @return number of registered mappings
|
||||||
*/
|
*/
|
||||||
int register_ke_mapping();
|
int register_dh_mapping();
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Destroy IANA KE method identifier to TKM id mapping.
|
* Destroy IANA DH group identifier to TKM id mapping.
|
||||||
*/
|
*/
|
||||||
void destroy_ke_mapping();
|
void destroy_dh_mapping();
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Creates a new tkm_key_exchange_t object.
|
* Creates a new tkm_diffie_hellman_t object.
|
||||||
*
|
*
|
||||||
* @param method Key exchange method to use
|
* @param group Diffie Hellman group number to use
|
||||||
* @return tkm_key_exchange_t object, NULL if not supported
|
* @return tkm_diffie_hellman_t object, NULL if not supported
|
||||||
*/
|
*/
|
||||||
tkm_key_exchange_t *tkm_key_exchange_create(key_exchange_method_t method);
|
tkm_diffie_hellman_t *tkm_diffie_hellman_create(key_exchange_method_t group);
|
||||||
|
|
||||||
#endif /** TKM_KEY_EXCHANGE_H_ @}*/
|
#endif /** TKM_DIFFIE_HELLMAN_H_ @}*/
|
@ -20,15 +20,14 @@
|
|||||||
#include <utils/debug.h>
|
#include <utils/debug.h>
|
||||||
#include <threading/rwlock.h>
|
#include <threading/rwlock.h>
|
||||||
|
|
||||||
ENUM_BEGIN(tkm_context_kind_names, TKM_CTX_NONCE, TKM_CTX_BLOB,
|
ENUM_BEGIN(tkm_context_kind_names, TKM_CTX_NONCE, TKM_CTX_ESA,
|
||||||
"NONCE_CONTEXT",
|
"NONCE_CONTEXT",
|
||||||
"DH_CONTEXT",
|
"DH_CONTEXT",
|
||||||
"CC_CONTEXT",
|
"CC_CONTEXT",
|
||||||
"ISA_CONTEXT",
|
"ISA_CONTEXT",
|
||||||
"AE_CONTEXT",
|
"AE_CONTEXT",
|
||||||
"ESA_CONTEXT",
|
"ESA_CONTEXT");
|
||||||
"BLOB_CONTEXT");
|
ENUM_END(tkm_context_kind_names, TKM_CTX_ESA);
|
||||||
ENUM_END(tkm_context_kind_names, TKM_CTX_BLOB);
|
|
||||||
|
|
||||||
typedef struct private_tkm_id_manager_t private_tkm_id_manager_t;
|
typedef struct private_tkm_id_manager_t private_tkm_id_manager_t;
|
||||||
|
|
||||||
|
@ -34,8 +34,8 @@ typedef enum tkm_context_kind_t tkm_context_kind_t;
|
|||||||
enum tkm_context_kind_t {
|
enum tkm_context_kind_t {
|
||||||
/** Nonce context */
|
/** Nonce context */
|
||||||
TKM_CTX_NONCE,
|
TKM_CTX_NONCE,
|
||||||
/** Key Exchange context */
|
/** Diffie-Hellman context */
|
||||||
TKM_CTX_KE,
|
TKM_CTX_DH,
|
||||||
/** Certificate chain context */
|
/** Certificate chain context */
|
||||||
TKM_CTX_CC,
|
TKM_CTX_CC,
|
||||||
/** IKE SA context */
|
/** IKE SA context */
|
||||||
@ -44,8 +44,6 @@ enum tkm_context_kind_t {
|
|||||||
TKM_CTX_AE,
|
TKM_CTX_AE,
|
||||||
/** ESP SA context */
|
/** ESP SA context */
|
||||||
TKM_CTX_ESA,
|
TKM_CTX_ESA,
|
||||||
/** Blob context */
|
|
||||||
TKM_CTX_BLOB,
|
|
||||||
|
|
||||||
/** helper to determine the number of elements in this enum */
|
/** helper to determine the number of elements in this enum */
|
||||||
TKM_CTX_MAX,
|
TKM_CTX_MAX,
|
||||||
|
@ -93,7 +93,6 @@ METHOD(kernel_ipsec_t, add_sa, status_t,
|
|||||||
kernel_ipsec_add_sa_t *data)
|
kernel_ipsec_add_sa_t *data)
|
||||||
{
|
{
|
||||||
esa_info_t esa;
|
esa_info_t esa;
|
||||||
esa_flags_type flags;
|
|
||||||
esp_spi_type spi_loc, spi_rem;
|
esp_spi_type spi_loc, spi_rem;
|
||||||
host_t *local, *peer;
|
host_t *local, *peer;
|
||||||
chunk_t *nonce_loc, *nonce_rem;
|
chunk_t *nonce_loc, *nonce_rem;
|
||||||
@ -108,31 +107,31 @@ METHOD(kernel_ipsec_t, add_sa, status_t,
|
|||||||
}
|
}
|
||||||
esa = *(esa_info_t *)(data->enc_key.ptr);
|
esa = *(esa_info_t *)(data->enc_key.ptr);
|
||||||
|
|
||||||
/* only handle the case where we have both distinct ESP SPIs available,
|
/* only handle the case where we have both distinct ESP spi's available */
|
||||||
* which is always the outbound SA */
|
if (esa.spi_r == id->spi)
|
||||||
if (esa.spi_l == id->spi)
|
|
||||||
{
|
{
|
||||||
chunk_free(&esa.nonce_i);
|
chunk_free(&esa.nonce_i);
|
||||||
chunk_free(&esa.nonce_r);
|
chunk_free(&esa.nonce_r);
|
||||||
return SUCCESS;
|
return SUCCESS;
|
||||||
}
|
}
|
||||||
|
|
||||||
spi_loc = esa.spi_l;
|
|
||||||
spi_rem = id->spi;
|
|
||||||
local = id->src;
|
|
||||||
peer = id->dst;
|
|
||||||
|
|
||||||
if (data->initiator)
|
if (data->initiator)
|
||||||
{
|
{
|
||||||
|
spi_loc = id->spi;
|
||||||
|
spi_rem = esa.spi_r;
|
||||||
|
local = id->dst;
|
||||||
|
peer = id->src;
|
||||||
nonce_loc = &esa.nonce_i;
|
nonce_loc = &esa.nonce_i;
|
||||||
nonce_rem = &esa.nonce_r;
|
nonce_rem = &esa.nonce_r;
|
||||||
flags = TKM_ESA_INITIATOR;
|
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
|
spi_loc = esa.spi_r;
|
||||||
|
spi_rem = id->spi;
|
||||||
|
local = id->src;
|
||||||
|
peer = id->dst;
|
||||||
nonce_loc = &esa.nonce_r;
|
nonce_loc = &esa.nonce_r;
|
||||||
nonce_rem = &esa.nonce_i;
|
nonce_rem = &esa.nonce_i;
|
||||||
flags = 0;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
esa_id = tkm->idmgr->acquire_id(tkm->idmgr, TKM_CTX_ESA);
|
esa_id = tkm->idmgr->acquire_id(tkm->idmgr, TKM_CTX_ESA);
|
||||||
@ -151,24 +150,24 @@ METHOD(kernel_ipsec_t, add_sa, status_t,
|
|||||||
|
|
||||||
/*
|
/*
|
||||||
* creation of first CHILD SA:
|
* creation of first CHILD SA:
|
||||||
* no nonce and no ke contexts because the ones from the IKE SA are re-used
|
* no nonce and no dh contexts because the ones from the IKE SA are re-used
|
||||||
*/
|
*/
|
||||||
nonce_loc_id = tkm->chunk_map->get_id(tkm->chunk_map, nonce_loc);
|
nonce_loc_id = tkm->chunk_map->get_id(tkm->chunk_map, nonce_loc);
|
||||||
if (nonce_loc_id == 0 && esa.ke_ids.size == 0)
|
if (nonce_loc_id == 0 && esa.dh_id == 0)
|
||||||
{
|
{
|
||||||
if (ike_esa_create_first(esa_id, esa.isa_id, data->reqid, 1, flags,
|
if (ike_esa_create_first(esa_id, esa.isa_id, data->reqid, 1, spi_loc,
|
||||||
spi_loc, spi_rem) != TKM_OK)
|
spi_rem) != TKM_OK)
|
||||||
{
|
{
|
||||||
DBG1(DBG_KNL, "child SA (%llu, first) creation failed", esa_id);
|
DBG1(DBG_KNL, "child SA (%llu, first) creation failed", esa_id);
|
||||||
goto failure;
|
goto failure;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
/* creation of child SA without PFS: no ke context */
|
/* creation of child SA without PFS: no dh context */
|
||||||
else if (nonce_loc_id != 0 && esa.ke_ids.size == 0)
|
else if (nonce_loc_id != 0 && esa.dh_id == 0)
|
||||||
{
|
{
|
||||||
chunk_to_sequence(nonce_rem, &nc_rem, sizeof(nonce_type));
|
chunk_to_sequence(nonce_rem, &nc_rem, sizeof(nonce_type));
|
||||||
if (ike_esa_create_no_pfs(esa_id, esa.isa_id, data->reqid, 1,
|
if (ike_esa_create_no_pfs(esa_id, esa.isa_id, data->reqid, 1,
|
||||||
nonce_loc_id, nc_rem, flags,
|
nonce_loc_id, nc_rem, data->initiator,
|
||||||
spi_loc, spi_rem) != TKM_OK)
|
spi_loc, spi_rem) != TKM_OK)
|
||||||
{
|
{
|
||||||
DBG1(DBG_KNL, "child SA (%llu, no PFS) creation failed", esa_id);
|
DBG1(DBG_KNL, "child SA (%llu, no PFS) creation failed", esa_id);
|
||||||
@ -177,12 +176,12 @@ METHOD(kernel_ipsec_t, add_sa, status_t,
|
|||||||
tkm->chunk_map->remove(tkm->chunk_map, nonce_loc);
|
tkm->chunk_map->remove(tkm->chunk_map, nonce_loc);
|
||||||
tkm->idmgr->release_id(tkm->idmgr, TKM_CTX_NONCE, nonce_loc_id);
|
tkm->idmgr->release_id(tkm->idmgr, TKM_CTX_NONCE, nonce_loc_id);
|
||||||
}
|
}
|
||||||
/* creation of subsequent child SA with PFS: nonce and ke context are set */
|
/* creation of subsequent child SA with PFS: nonce and dh context are set */
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
chunk_to_sequence(nonce_rem, &nc_rem, sizeof(nonce_type));
|
chunk_to_sequence(nonce_rem, &nc_rem, sizeof(nonce_type));
|
||||||
if (ike_esa_create(esa_id, esa.isa_id, data->reqid, 1, esa.ke_ids,
|
if (ike_esa_create(esa_id, esa.isa_id, data->reqid, 1, esa.dh_id,
|
||||||
nonce_loc_id, nc_rem, flags, spi_loc,
|
nonce_loc_id, nc_rem, data->initiator, spi_loc,
|
||||||
spi_rem) != TKM_OK)
|
spi_rem) != TKM_OK)
|
||||||
{
|
{
|
||||||
DBG1(DBG_KNL, "child SA (%llu) creation failed", esa_id);
|
DBG1(DBG_KNL, "child SA (%llu) creation failed", esa_id);
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
/*
|
/*
|
||||||
* Copyright (C) 2015-2020 Tobias Brunner
|
* Copyright (C) 2015 Tobias Brunner
|
||||||
* Copyright (C) 2012 Reto Buerki
|
* Copyright (C) 2012 Reto Buerki
|
||||||
* Copyright (C) 2012 Adrian-Ken Rueegsegger
|
* Copyright (C) 2012 Adrian-Ken Rueegsegger
|
||||||
*
|
*
|
||||||
@ -24,7 +24,7 @@
|
|||||||
#include "tkm.h"
|
#include "tkm.h"
|
||||||
#include "tkm_types.h"
|
#include "tkm_types.h"
|
||||||
#include "tkm_utils.h"
|
#include "tkm_utils.h"
|
||||||
#include "tkm_key_exchange.h"
|
#include "tkm_diffie_hellman.h"
|
||||||
#include "tkm_keymat.h"
|
#include "tkm_keymat.h"
|
||||||
#include "tkm_aead.h"
|
#include "tkm_aead.h"
|
||||||
|
|
||||||
@ -94,50 +94,33 @@ METHOD(keymat_t, create_nonce_gen, nonce_gen_t*,
|
|||||||
return lib->crypto->create_nonce_gen(lib->crypto);
|
return lib->crypto->create_nonce_gen(lib->crypto);
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Concatenate the TKM KE IDs of the passed key exchanges
|
|
||||||
*/
|
|
||||||
static bool concat_ke_ids(array_t *kes, ke_ids_type *ids)
|
|
||||||
{
|
|
||||||
tkm_key_exchange_t *tkm_ke;
|
|
||||||
uint32_t i;
|
|
||||||
|
|
||||||
memset(ids, 0, sizeof(*ids));
|
|
||||||
ids->size = array_count(kes);
|
|
||||||
|
|
||||||
if (!ids->size || ids->size > 8)
|
|
||||||
{
|
|
||||||
return FALSE;
|
|
||||||
}
|
|
||||||
|
|
||||||
for (i = 0; i < ids->size; i++)
|
|
||||||
{
|
|
||||||
array_get(kes, i, &tkm_ke);
|
|
||||||
ids->data[i] = tkm_ke->get_id(tkm_ke);
|
|
||||||
}
|
|
||||||
return TRUE;
|
|
||||||
}
|
|
||||||
|
|
||||||
METHOD(keymat_v2_t, derive_ike_keys, bool,
|
METHOD(keymat_v2_t, derive_ike_keys, bool,
|
||||||
private_tkm_keymat_t *this, proposal_t *proposal, array_t *kes,
|
private_tkm_keymat_t *this, proposal_t *proposal, key_exchange_t *ke,
|
||||||
chunk_t nonce_i, chunk_t nonce_r, ike_sa_id_t *id,
|
chunk_t nonce_i, chunk_t nonce_r, ike_sa_id_t *id,
|
||||||
pseudo_random_function_t rekey_function, chunk_t rekey_skd)
|
pseudo_random_function_t rekey_function, chunk_t rekey_skd)
|
||||||
{
|
{
|
||||||
uint64_t nc_id = 0, spi_loc, spi_rem;
|
uint64_t nc_id, spi_loc, spi_rem;
|
||||||
chunk_t *nonce;
|
chunk_t *nonce;
|
||||||
ke_ids_type ke_ids;
|
tkm_diffie_hellman_t *tkm_dh;
|
||||||
|
dh_id_type dh_id;
|
||||||
nonce_type nonce_rem;
|
nonce_type nonce_rem;
|
||||||
result_type res;
|
result_type res;
|
||||||
block_len_type block_len;
|
block_len_type block_len;
|
||||||
icv_len_type icv_len;
|
icv_len_type icv_len;
|
||||||
iv_len_type iv_len;
|
iv_len_type iv_len;
|
||||||
|
|
||||||
if (!concat_ke_ids(kes, &ke_ids))
|
/* Acquire nonce context id */
|
||||||
|
nonce = this->initiator ? &nonce_i : &nonce_r;
|
||||||
|
nc_id = tkm->chunk_map->get_id(tkm->chunk_map, nonce);
|
||||||
|
if (!nc_id)
|
||||||
{
|
{
|
||||||
|
DBG1(DBG_IKE, "unable to acquire context id for nonce");
|
||||||
return FALSE;
|
return FALSE;
|
||||||
}
|
}
|
||||||
|
|
||||||
nonce = this->initiator ? &nonce_i : &nonce_r;
|
/* Get DH context id */
|
||||||
|
tkm_dh = (tkm_diffie_hellman_t *)ke;
|
||||||
|
dh_id = tkm_dh->get_id(tkm_dh);
|
||||||
|
|
||||||
if (this->initiator)
|
if (this->initiator)
|
||||||
{
|
{
|
||||||
@ -154,24 +137,16 @@ METHOD(keymat_v2_t, derive_ike_keys, bool,
|
|||||||
|
|
||||||
if (rekey_function == PRF_UNDEFINED)
|
if (rekey_function == PRF_UNDEFINED)
|
||||||
{
|
{
|
||||||
/* Acquire nonce context id */
|
|
||||||
nc_id = tkm->chunk_map->get_id(tkm->chunk_map, nonce);
|
|
||||||
if (!nc_id)
|
|
||||||
{
|
|
||||||
DBG1(DBG_IKE, "unable to acquire context id for nonce");
|
|
||||||
return FALSE;
|
|
||||||
}
|
|
||||||
|
|
||||||
this->ae_ctx_id = tkm->idmgr->acquire_id(tkm->idmgr, TKM_CTX_AE);
|
this->ae_ctx_id = tkm->idmgr->acquire_id(tkm->idmgr, TKM_CTX_AE);
|
||||||
if (!this->ae_ctx_id)
|
if (!this->ae_ctx_id)
|
||||||
{
|
{
|
||||||
DBG1(DBG_IKE, "unable to acquire ae context id");
|
DBG1(DBG_IKE, "unable to acquire ae context id");
|
||||||
return FALSE;
|
return FALSE;
|
||||||
}
|
}
|
||||||
DBG1(DBG_IKE, "deriving IKE keys (nc: %llu, ke: %llu, spi_loc: %llx, "
|
DBG1(DBG_IKE, "deriving IKE keys (nc: %llu, dh: %llu, spi_loc: %llx, "
|
||||||
"spi_rem: %llx)", nc_id, ke_ids.data[0], spi_loc, spi_rem);
|
"spi_rem: %llx)", nc_id, dh_id, spi_loc, spi_rem);
|
||||||
res = ike_isa_create(this->isa_ctx_id, this->ae_ctx_id, 1, ke_ids.data[0],
|
res = ike_isa_create(this->isa_ctx_id, this->ae_ctx_id, 1, dh_id, nc_id,
|
||||||
nc_id, nonce_rem, this->initiator, spi_loc, spi_rem,
|
nonce_rem, this->initiator, spi_loc, spi_rem,
|
||||||
&block_len, &icv_len, &iv_len);
|
&block_len, &icv_len, &iv_len);
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
@ -184,103 +159,77 @@ METHOD(keymat_v2_t, derive_ike_keys, bool,
|
|||||||
return FALSE;
|
return FALSE;
|
||||||
}
|
}
|
||||||
isa_info = *((isa_info_t *)(rekey_skd.ptr));
|
isa_info = *((isa_info_t *)(rekey_skd.ptr));
|
||||||
|
DBG1(DBG_IKE, "deriving IKE keys (parent_isa: %llu, ae: %llu, nc: %llu,"
|
||||||
|
" dh: %llu, spi_loc: %llx, spi_rem: %llx)", isa_info.parent_isa_id,
|
||||||
|
isa_info.ae_id, nc_id, dh_id, spi_loc, spi_rem);
|
||||||
|
|
||||||
if (this->ae_ctx_id == isa_info.ae_id)
|
if (!tkm->idmgr->acquire_ref(tkm->idmgr, TKM_CTX_AE, isa_info.ae_id))
|
||||||
{
|
|
||||||
DBG1(DBG_IKE, "deriving IKE keys (parent_isa: %llu, ae: %llu, "
|
|
||||||
"ke: %llu, spi_loc: %llx, spi_rem: %llx)", isa_info.parent_isa_id,
|
|
||||||
isa_info.ae_id, ke_ids.data[0], spi_loc, spi_rem);
|
|
||||||
|
|
||||||
res = ike_isa_update(this->isa_ctx_id, ke_ids.data[0]);
|
|
||||||
}
|
|
||||||
else if (!(nc_id = tkm->chunk_map->get_id(tkm->chunk_map, nonce)))
|
|
||||||
{
|
|
||||||
DBG1(DBG_IKE, "unable to acquire context id for nonce");
|
|
||||||
return FALSE;
|
|
||||||
}
|
|
||||||
else if (!tkm->idmgr->acquire_ref(tkm->idmgr, TKM_CTX_AE, isa_info.ae_id))
|
|
||||||
{
|
{
|
||||||
DBG1(DBG_IKE, "unable to acquire reference for ae: %llu",
|
DBG1(DBG_IKE, "unable to acquire reference for ae: %llu",
|
||||||
isa_info.ae_id);
|
isa_info.ae_id);
|
||||||
return FALSE;
|
return FALSE;
|
||||||
}
|
}
|
||||||
else
|
this->ae_ctx_id = isa_info.ae_id;
|
||||||
{
|
res = ike_isa_create_child(this->isa_ctx_id, isa_info.parent_isa_id, 1,
|
||||||
DBG1(DBG_IKE, "deriving IKE keys (parent_isa: %llu, ae: %llu, nc: %llu, "
|
dh_id, nc_id, nonce_rem, this->initiator,
|
||||||
"ke: %llu, spi_loc: %llx, spi_rem: %llx)", isa_info.parent_isa_id,
|
spi_loc, spi_rem, &block_len, &icv_len,
|
||||||
isa_info.ae_id, nc_id, ke_ids.data[0], spi_loc, spi_rem);
|
&iv_len);
|
||||||
|
|
||||||
this->ae_ctx_id = isa_info.ae_id;
|
|
||||||
res = ike_isa_create_child(this->isa_ctx_id, isa_info.parent_isa_id, 1,
|
|
||||||
ke_ids, nc_id, nonce_rem, this->initiator,
|
|
||||||
spi_loc, spi_rem, &block_len, &icv_len,
|
|
||||||
&iv_len);
|
|
||||||
}
|
|
||||||
|
|
||||||
chunk_free(&rekey_skd);
|
chunk_free(&rekey_skd);
|
||||||
}
|
}
|
||||||
|
|
||||||
if (nc_id)
|
|
||||||
{
|
|
||||||
tkm->chunk_map->remove(tkm->chunk_map, nonce);
|
|
||||||
if (ike_nc_reset(nc_id) != TKM_OK)
|
|
||||||
{
|
|
||||||
DBG1(DBG_IKE, "failed to reset nonce context %llu", nc_id);
|
|
||||||
}
|
|
||||||
tkm->idmgr->release_id(tkm->idmgr, TKM_CTX_NONCE, nc_id);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (res != TKM_OK)
|
if (res != TKM_OK)
|
||||||
{
|
{
|
||||||
DBG1(DBG_IKE, "key derivation failed (isa: %llu)", this->isa_ctx_id);
|
DBG1(DBG_IKE, "key derivation failed (isa: %llu)", this->isa_ctx_id);
|
||||||
return FALSE;
|
return FALSE;
|
||||||
}
|
}
|
||||||
|
|
||||||
if (!this->aead)
|
this->aead = tkm_aead_create(this->isa_ctx_id, block_len, icv_len, iv_len);
|
||||||
{
|
|
||||||
this->aead = tkm_aead_create(this->isa_ctx_id, block_len, icv_len,
|
|
||||||
iv_len);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* TODO: Add failure handler (see keymat_v2.c) */
|
/* TODO: Add failure handler (see keymat_v2.c) */
|
||||||
|
|
||||||
|
tkm->chunk_map->remove(tkm->chunk_map, nonce);
|
||||||
|
if (ike_nc_reset(nc_id) != TKM_OK)
|
||||||
|
{
|
||||||
|
DBG1(DBG_IKE, "failed to reset nonce context %llu", nc_id);
|
||||||
|
}
|
||||||
|
tkm->idmgr->release_id(tkm->idmgr, TKM_CTX_NONCE, nc_id);
|
||||||
|
|
||||||
return TRUE;
|
return TRUE;
|
||||||
}
|
}
|
||||||
|
|
||||||
METHOD(keymat_v2_t, derive_child_keys, bool,
|
METHOD(keymat_v2_t, derive_child_keys, bool,
|
||||||
private_tkm_keymat_t *this, proposal_t *proposal, array_t *kes,
|
private_tkm_keymat_t *this, proposal_t *proposal, key_exchange_t *ke,
|
||||||
chunk_t nonce_i, chunk_t nonce_r, chunk_t *encr_i, chunk_t *integ_i,
|
chunk_t nonce_i, chunk_t nonce_r, chunk_t *encr_i, chunk_t *integ_i,
|
||||||
chunk_t *encr_r, chunk_t *integ_r)
|
chunk_t *encr_r, chunk_t *integ_r)
|
||||||
{
|
{
|
||||||
esa_info_t *esa_info_i, *esa_info_r;
|
esa_info_t *esa_info_i, *esa_info_r;
|
||||||
ke_ids_type ke_ids = {};
|
dh_id_type dh_id = 0;
|
||||||
|
|
||||||
if (kes && !concat_ke_ids(kes, &ke_ids))
|
if (ke)
|
||||||
{
|
{
|
||||||
return FALSE;
|
dh_id = ((tkm_diffie_hellman_t *)ke)->get_id((tkm_diffie_hellman_t *)ke);
|
||||||
}
|
}
|
||||||
|
|
||||||
INIT(esa_info_i,
|
INIT(esa_info_i,
|
||||||
.isa_id = this->isa_ctx_id,
|
.isa_id = this->isa_ctx_id,
|
||||||
.spi_l = proposal->get_spi(proposal),
|
.spi_r = proposal->get_spi(proposal),
|
||||||
.nonce_i = chunk_clone(nonce_i),
|
.nonce_i = chunk_clone(nonce_i),
|
||||||
.nonce_r = chunk_clone(nonce_r),
|
.nonce_r = chunk_clone(nonce_r),
|
||||||
.is_encr_r = FALSE,
|
.is_encr_r = FALSE,
|
||||||
.ke_ids = ke_ids,
|
.dh_id = dh_id,
|
||||||
);
|
);
|
||||||
|
|
||||||
INIT(esa_info_r,
|
INIT(esa_info_r,
|
||||||
.isa_id = this->isa_ctx_id,
|
.isa_id = this->isa_ctx_id,
|
||||||
.spi_l = proposal->get_spi(proposal),
|
.spi_r = proposal->get_spi(proposal),
|
||||||
.nonce_i = chunk_clone(nonce_i),
|
.nonce_i = chunk_clone(nonce_i),
|
||||||
.nonce_r = chunk_clone(nonce_r),
|
.nonce_r = chunk_clone(nonce_r),
|
||||||
.is_encr_r = TRUE,
|
.is_encr_r = TRUE,
|
||||||
.ke_ids = ke_ids,
|
.dh_id = dh_id,
|
||||||
);
|
);
|
||||||
|
|
||||||
DBG1(DBG_CHD, "passing on esa info (isa: %llu, spi_l: %x, "
|
DBG1(DBG_CHD, "passing on esa info (isa: %llu, spi_r: %x, dh_id: %llu)",
|
||||||
"ke_id[%llu]: %llu)", esa_info_i->isa_id, ntohl(esa_info_i->spi_l),
|
esa_info_i->isa_id, ntohl(esa_info_i->spi_r), esa_info_i->dh_id);
|
||||||
esa_info_i->ke_ids.size, esa_info_i->ke_ids.data[0]);
|
|
||||||
|
|
||||||
/* store ESA info in encr_i/r, which is passed to add_sa */
|
/* store ESA info in encr_i/r, which is passed to add_sa */
|
||||||
*encr_i = chunk_create((u_char *)esa_info_i, sizeof(esa_info_t));
|
*encr_i = chunk_create((u_char *)esa_info_i, sizeof(esa_info_t));
|
||||||
@ -297,30 +246,10 @@ METHOD(keymat_t, get_aead, aead_t*,
|
|||||||
return this->aead;
|
return this->aead;
|
||||||
}
|
}
|
||||||
|
|
||||||
METHOD(keymat_v2_t, get_int_auth, bool,
|
|
||||||
private_tkm_keymat_t *this, bool verify, chunk_t data, chunk_t prev,
|
|
||||||
chunk_t *auth)
|
|
||||||
{
|
|
||||||
blob_id_type data_id;
|
|
||||||
bool ret = FALSE;
|
|
||||||
|
|
||||||
*auth = chunk_empty;
|
|
||||||
|
|
||||||
data_id = tkm->idmgr->acquire_id(tkm->idmgr, TKM_CTX_BLOB);
|
|
||||||
if (data_id)
|
|
||||||
{
|
|
||||||
ret = chunk_to_blob(data_id, &data) &&
|
|
||||||
ike_isa_int_auth(this->isa_ctx_id, verify, data_id) == TKM_OK;
|
|
||||||
|
|
||||||
tkm->idmgr->release_id(tkm->idmgr, TKM_CTX_BLOB, data_id);
|
|
||||||
}
|
|
||||||
return ret;
|
|
||||||
}
|
|
||||||
|
|
||||||
METHOD(keymat_v2_t, get_auth_octets, bool,
|
METHOD(keymat_v2_t, get_auth_octets, bool,
|
||||||
private_tkm_keymat_t *this, bool verify, chunk_t ike_sa_init,
|
private_tkm_keymat_t *this, bool verify, chunk_t ike_sa_init,
|
||||||
chunk_t nonce, chunk_t int_auth, chunk_t ppk, identification_t *id,
|
chunk_t nonce, chunk_t ppk, identification_t *id, char reserved[3],
|
||||||
char reserved[3], chunk_t *octets, array_t *schemes)
|
chunk_t *octets, array_t *schemes)
|
||||||
{
|
{
|
||||||
sign_info_t *sign;
|
sign_info_t *sign;
|
||||||
|
|
||||||
@ -350,12 +279,6 @@ METHOD(keymat_v2_t, get_skd, pseudo_random_function_t,
|
|||||||
{
|
{
|
||||||
isa_info_t *isa_info;
|
isa_info_t *isa_info;
|
||||||
|
|
||||||
if (!this->ae_ctx_id)
|
|
||||||
{
|
|
||||||
*skd = chunk_empty;
|
|
||||||
return PRF_UNDEFINED;
|
|
||||||
}
|
|
||||||
|
|
||||||
INIT(isa_info,
|
INIT(isa_info,
|
||||||
.parent_isa_id = this->isa_ctx_id,
|
.parent_isa_id = this->isa_ctx_id,
|
||||||
.ae_id = this->ae_ctx_id,
|
.ae_id = this->ae_ctx_id,
|
||||||
@ -368,8 +291,8 @@ METHOD(keymat_v2_t, get_skd, pseudo_random_function_t,
|
|||||||
|
|
||||||
METHOD(keymat_v2_t, get_psk_sig, bool,
|
METHOD(keymat_v2_t, get_psk_sig, bool,
|
||||||
private_tkm_keymat_t *this, bool verify, chunk_t ike_sa_init, chunk_t nonce,
|
private_tkm_keymat_t *this, bool verify, chunk_t ike_sa_init, chunk_t nonce,
|
||||||
chunk_t int_auth, chunk_t secret, chunk_t ppk, identification_t *id,
|
chunk_t secret, chunk_t ppk, identification_t *id, char reserved[3],
|
||||||
char reserved[3], chunk_t *sig)
|
chunk_t *sig)
|
||||||
{
|
{
|
||||||
return FALSE;
|
return FALSE;
|
||||||
}
|
}
|
||||||
@ -465,7 +388,6 @@ tkm_keymat_t *tkm_keymat_create(bool initiator)
|
|||||||
.derive_ike_keys_ppk = (void*)return_false,
|
.derive_ike_keys_ppk = (void*)return_false,
|
||||||
.derive_child_keys = _derive_child_keys,
|
.derive_child_keys = _derive_child_keys,
|
||||||
.get_skd = _get_skd,
|
.get_skd = _get_skd,
|
||||||
.get_int_auth = _get_int_auth,
|
|
||||||
.get_auth_octets = _get_auth_octets,
|
.get_auth_octets = _get_auth_octets,
|
||||||
.get_psk_sig = _get_psk_sig,
|
.get_psk_sig = _get_psk_sig,
|
||||||
.add_hash_algorithm = _add_hash_algorithm,
|
.add_hash_algorithm = _add_hash_algorithm,
|
||||||
|
@ -49,9 +49,9 @@ struct esa_info_t {
|
|||||||
isa_id_type isa_id;
|
isa_id_type isa_id;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Local SPI of child SA.
|
* Responder SPI of child SA.
|
||||||
*/
|
*/
|
||||||
esp_spi_type spi_l;
|
esp_spi_type spi_r;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Initiator nonce.
|
* Initiator nonce.
|
||||||
@ -70,9 +70,9 @@ struct esa_info_t {
|
|||||||
bool is_encr_r;
|
bool is_encr_r;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Key Exchange context ids.
|
* Diffie-Hellman context id.
|
||||||
*/
|
*/
|
||||||
ke_ids_type ke_ids;
|
dh_id_type dh_id;
|
||||||
|
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -17,9 +17,6 @@
|
|||||||
|
|
||||||
#include <utils/debug.h>
|
#include <utils/debug.h>
|
||||||
|
|
||||||
#include <tkm/client.h>
|
|
||||||
#include <tkm/constants.h>
|
|
||||||
|
|
||||||
#include "tkm_utils.h"
|
#include "tkm_utils.h"
|
||||||
|
|
||||||
/* Generic variable-length sequence */
|
/* Generic variable-length sequence */
|
||||||
@ -55,48 +52,3 @@ void chunk_to_sequence(const chunk_t * const chunk, void *sequence,
|
|||||||
}
|
}
|
||||||
memcpy(seq->data, chunk->ptr, seq->size);
|
memcpy(seq->data, chunk->ptr, seq->size);
|
||||||
}
|
}
|
||||||
|
|
||||||
bool blob_to_chunk(blob_id_type id, blob_length_type len, chunk_t * const chunk)
|
|
||||||
{
|
|
||||||
blob_offset_type offset = 0;
|
|
||||||
bool ret = TRUE;
|
|
||||||
|
|
||||||
*chunk = chunk_alloc(len);
|
|
||||||
|
|
||||||
while (len > 0 && ret)
|
|
||||||
{
|
|
||||||
blob_out_bytes_type blob_data;
|
|
||||||
blob_length_type slice_len = min(len, sizeof(blob_data.data));
|
|
||||||
|
|
||||||
ret = ike_blob_read(id, offset, slice_len, &blob_data) == TKM_OK;
|
|
||||||
memcpy(chunk->ptr + offset, blob_data.data, slice_len);
|
|
||||||
offset += slice_len;
|
|
||||||
len -= slice_len;
|
|
||||||
}
|
|
||||||
|
|
||||||
ike_blob_reset(id);
|
|
||||||
|
|
||||||
return ret;
|
|
||||||
}
|
|
||||||
|
|
||||||
bool chunk_to_blob(blob_id_type id, const chunk_t * const chunk)
|
|
||||||
{
|
|
||||||
blob_length_type len = chunk->len;
|
|
||||||
blob_offset_type offset = 0;
|
|
||||||
bool ret;
|
|
||||||
|
|
||||||
ret = ike_blob_create(id, len) == TKM_OK;
|
|
||||||
|
|
||||||
while (len > 0 && ret)
|
|
||||||
{
|
|
||||||
blob_in_bytes_type blob_data;
|
|
||||||
blob_length_type slice_len = min(len, sizeof(blob_data.data));
|
|
||||||
|
|
||||||
memcpy(blob_data.data, chunk->ptr + offset, slice_len);
|
|
||||||
blob_data.size = slice_len;
|
|
||||||
ret = ike_blob_write(id, offset, blob_data) == TKM_OK;
|
|
||||||
offset += slice_len;
|
|
||||||
len -= slice_len;
|
|
||||||
}
|
|
||||||
return ret;
|
|
||||||
}
|
|
||||||
|
@ -46,21 +46,4 @@ void sequence_to_chunk(const byte_t * const first, const uint32_t len,
|
|||||||
void chunk_to_sequence(const chunk_t * const chunk, void *sequence,
|
void chunk_to_sequence(const chunk_t * const chunk, void *sequence,
|
||||||
const uint32_t typelen);
|
const uint32_t typelen);
|
||||||
|
|
||||||
/**
|
|
||||||
* Convert blob to chunk and reset the blob.
|
|
||||||
*
|
|
||||||
* @param id id of blob
|
|
||||||
* @param len length of blob
|
|
||||||
* @param chunk pointer to chunk struct
|
|
||||||
*/
|
|
||||||
bool blob_to_chunk(blob_id_type id, blob_length_type len, chunk_t * const chunk);
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Convert chunk to newly created blob.
|
|
||||||
*
|
|
||||||
* @param id id of blob
|
|
||||||
* @param chunk pointer to chunk struct
|
|
||||||
*/
|
|
||||||
bool chunk_to_blob(blob_id_type id, const chunk_t * const chunk);
|
|
||||||
|
|
||||||
#endif /** TKM_UTILS_H_ @}*/
|
#endif /** TKM_UTILS_H_ @}*/
|
||||||
|
@ -18,31 +18,31 @@
|
|||||||
#include <daemon.h>
|
#include <daemon.h>
|
||||||
#include <tests/test_suite.h>
|
#include <tests/test_suite.h>
|
||||||
|
|
||||||
#include "tkm_key_exchange.h"
|
#include "tkm_diffie_hellman.h"
|
||||||
|
|
||||||
START_TEST(test_ke_creation)
|
START_TEST(test_dh_creation)
|
||||||
{
|
{
|
||||||
tkm_key_exchange_t *ke = NULL;
|
tkm_diffie_hellman_t *dh = NULL;
|
||||||
|
|
||||||
ke = tkm_key_exchange_create(MODP_768_BIT);
|
dh = tkm_diffie_hellman_create(MODP_768_BIT);
|
||||||
fail_if(ke, "MODP_768 created");
|
fail_if(dh, "MODP_768 created");
|
||||||
|
|
||||||
ke = tkm_key_exchange_create(MODP_4096_BIT);
|
dh = tkm_diffie_hellman_create(MODP_4096_BIT);
|
||||||
fail_if(!ke, "MODP_4096 not created");
|
fail_if(!dh, "MODP_4096 not created");
|
||||||
fail_if(!ke->get_id(ke), "Invalid context id (0)");
|
fail_if(!dh->get_id(dh), "Invalid context id (0)");
|
||||||
|
|
||||||
ke->ke.destroy(&ke->ke);
|
dh->ke.destroy(&dh->ke);
|
||||||
}
|
}
|
||||||
END_TEST
|
END_TEST
|
||||||
|
|
||||||
START_TEST(test_ke_get_my_pubvalue)
|
START_TEST(test_dh_get_my_pubvalue)
|
||||||
{
|
{
|
||||||
tkm_key_exchange_t *ke = tkm_key_exchange_create(MODP_4096_BIT);
|
tkm_diffie_hellman_t *dh = tkm_diffie_hellman_create(MODP_4096_BIT);
|
||||||
fail_if(!ke, "Unable to create KE");
|
fail_if(!dh, "Unable to create DH");
|
||||||
|
|
||||||
chunk_t value;
|
chunk_t value;
|
||||||
ck_assert(ke->ke.get_public_key(&ke->ke, &value));
|
ck_assert(dh->ke.get_public_key(&dh->ke, &value));
|
||||||
ke->ke.destroy(&ke->ke);
|
dh->ke.destroy(&dh->ke);
|
||||||
|
|
||||||
fail_if(value.ptr == NULL, "Pubvalue is NULL");
|
fail_if(value.ptr == NULL, "Pubvalue is NULL");
|
||||||
fail_if(value.len != 512, "Pubvalue size mismatch");
|
fail_if(value.len != 512, "Pubvalue size mismatch");
|
||||||
@ -51,19 +51,19 @@ START_TEST(test_ke_get_my_pubvalue)
|
|||||||
}
|
}
|
||||||
END_TEST
|
END_TEST
|
||||||
|
|
||||||
Suite *make_key_exchange_tests()
|
Suite *make_diffie_hellman_tests()
|
||||||
{
|
{
|
||||||
Suite *s;
|
Suite *s;
|
||||||
TCase *tc;
|
TCase *tc;
|
||||||
|
|
||||||
s = suite_create("key exchange");
|
s = suite_create("Diffie-Hellman");
|
||||||
|
|
||||||
tc = tcase_create("creation");
|
tc = tcase_create("creation");
|
||||||
tcase_add_test(tc, test_ke_creation);
|
tcase_add_test(tc, test_dh_creation);
|
||||||
suite_add_tcase(s, tc);
|
suite_add_tcase(s, tc);
|
||||||
|
|
||||||
tc = tcase_create("get_my_pubvalue");
|
tc = tcase_create("get_my_pubvalue");
|
||||||
tcase_add_test(tc, test_ke_get_my_pubvalue);
|
tcase_add_test(tc, test_dh_get_my_pubvalue);
|
||||||
suite_add_tcase(s, tc);
|
suite_add_tcase(s, tc);
|
||||||
|
|
||||||
return s;
|
return s;
|
@ -19,7 +19,7 @@
|
|||||||
|
|
||||||
#include "tkm_id_manager.h"
|
#include "tkm_id_manager.h"
|
||||||
|
|
||||||
static const tkm_limits_t limits = {125, 100, 55, 30, 200, 42, 21};
|
static const tkm_limits_t limits = {125, 100, 55, 30, 200, 42};
|
||||||
|
|
||||||
START_TEST(test_id_mgr_creation)
|
START_TEST(test_id_mgr_creation)
|
||||||
{
|
{
|
||||||
|
@ -24,7 +24,7 @@
|
|||||||
|
|
||||||
#include "tkm.h"
|
#include "tkm.h"
|
||||||
#include "tkm_nonceg.h"
|
#include "tkm_nonceg.h"
|
||||||
#include "tkm_key_exchange.h"
|
#include "tkm_diffie_hellman.h"
|
||||||
#include "tkm_keymat.h"
|
#include "tkm_keymat.h"
|
||||||
#include "tkm_types.h"
|
#include "tkm_types.h"
|
||||||
|
|
||||||
@ -47,20 +47,17 @@ START_TEST(test_derive_ike_keys)
|
|||||||
fail_unless(ng->nonce_gen.allocate_nonce(&ng->nonce_gen, 32, &nonce),
|
fail_unless(ng->nonce_gen.allocate_nonce(&ng->nonce_gen, 32, &nonce),
|
||||||
"Unable to allocate nonce");
|
"Unable to allocate nonce");
|
||||||
|
|
||||||
tkm_key_exchange_t *ke = tkm_key_exchange_create(MODP_4096_BIT);
|
tkm_diffie_hellman_t *dh = tkm_diffie_hellman_create(MODP_4096_BIT);
|
||||||
fail_if(!ke, "Unable to create KE");
|
fail_if(!dh, "Unable to create DH");
|
||||||
|
|
||||||
/* Use the same pubvalue for both sides */
|
/* Use the same pubvalue for both sides */
|
||||||
chunk_t pubvalue;
|
chunk_t pubvalue;
|
||||||
ck_assert(ke->ke.get_public_key(&ke->ke, &pubvalue));
|
ck_assert(dh->ke.get_public_key(&dh->ke, &pubvalue));
|
||||||
ck_assert(ke->ke.set_public_key(&ke->ke, pubvalue));
|
ck_assert(dh->ke.set_public_key(&dh->ke, pubvalue));
|
||||||
|
|
||||||
array_t *kes = NULL;
|
|
||||||
array_insert_create(&kes, ARRAY_TAIL, ke);
|
|
||||||
fail_unless(keymat->keymat_v2.derive_ike_keys(&keymat->keymat_v2, proposal,
|
fail_unless(keymat->keymat_v2.derive_ike_keys(&keymat->keymat_v2, proposal,
|
||||||
kes, nonce, nonce, ike_sa_id, PRF_UNDEFINED, chunk_empty),
|
&dh->ke, nonce, nonce, ike_sa_id, PRF_UNDEFINED, chunk_empty),
|
||||||
"Key derivation failed");
|
"Key derivation failed");
|
||||||
array_destroy(kes);
|
|
||||||
chunk_free(&nonce);
|
chunk_free(&nonce);
|
||||||
|
|
||||||
aead_t * const aead = keymat->keymat_v2.keymat.get_aead(&keymat->keymat_v2.keymat, TRUE);
|
aead_t * const aead = keymat->keymat_v2.keymat.get_aead(&keymat->keymat_v2.keymat, TRUE);
|
||||||
@ -73,132 +70,17 @@ START_TEST(test_derive_ike_keys)
|
|||||||
|
|
||||||
ng->nonce_gen.destroy(&ng->nonce_gen);
|
ng->nonce_gen.destroy(&ng->nonce_gen);
|
||||||
proposal->destroy(proposal);
|
proposal->destroy(proposal);
|
||||||
ke->ke.destroy(&ke->ke);
|
dh->ke.destroy(&dh->ke);
|
||||||
ike_sa_id->destroy(ike_sa_id);
|
ike_sa_id->destroy(ike_sa_id);
|
||||||
keymat->keymat_v2.keymat.destroy(&keymat->keymat_v2.keymat);
|
keymat->keymat_v2.keymat.destroy(&keymat->keymat_v2.keymat);
|
||||||
chunk_free(&pubvalue);
|
chunk_free(&pubvalue);
|
||||||
}
|
}
|
||||||
END_TEST
|
END_TEST
|
||||||
|
|
||||||
START_TEST(test_derive_ike_keys_multi_ke)
|
|
||||||
{
|
|
||||||
proposal_t *proposal = proposal_create_from_string(PROTO_IKE,
|
|
||||||
"aes256-sha512-modp3072-ke1_modp4096");
|
|
||||||
fail_if(!proposal, "Unable to create proposal");
|
|
||||||
ike_sa_id_t *ike_sa_id = ike_sa_id_create(IKEV2_MAJOR_VERSION,
|
|
||||||
123912312312, 32312313122, TRUE);
|
|
||||||
fail_if(!ike_sa_id, "Unable to create IKE SA ID");
|
|
||||||
|
|
||||||
tkm_keymat_t *keymat = tkm_keymat_create(TRUE);
|
|
||||||
fail_if(!keymat, "Unable to create keymat");
|
|
||||||
fail_if(!keymat->get_isa_id(keymat), "Invalid ISA context id (0)");
|
|
||||||
|
|
||||||
chunk_t nonce;
|
|
||||||
tkm_nonceg_t *ng = tkm_nonceg_create();
|
|
||||||
fail_if(!ng, "Unable to create nonce generator");
|
|
||||||
fail_unless(ng->nonce_gen.allocate_nonce(&ng->nonce_gen, 32, &nonce),
|
|
||||||
"Unable to allocate nonce");
|
|
||||||
|
|
||||||
tkm_key_exchange_t *ke = tkm_key_exchange_create(MODP_3072_BIT);
|
|
||||||
fail_if(!ke, "Unable to create first KE");
|
|
||||||
|
|
||||||
/* Use the same pubvalue for both sides */
|
|
||||||
chunk_t pubvalue;
|
|
||||||
ck_assert(ke->ke.get_public_key(&ke->ke, &pubvalue));
|
|
||||||
ck_assert(ke->ke.set_public_key(&ke->ke, pubvalue));
|
|
||||||
chunk_free(&pubvalue);
|
|
||||||
|
|
||||||
array_t *kes = NULL;
|
|
||||||
array_insert_create(&kes, ARRAY_TAIL, ke);
|
|
||||||
fail_unless(keymat->keymat_v2.derive_ike_keys(&keymat->keymat_v2, proposal,
|
|
||||||
kes, nonce, nonce, ike_sa_id, PRF_UNDEFINED, chunk_empty),
|
|
||||||
"Key derivation failed");
|
|
||||||
array_destroy(kes);
|
|
||||||
ke->ke.destroy(&ke->ke);
|
|
||||||
|
|
||||||
const aead_t *aead = keymat->keymat_v2.keymat.get_aead(&keymat->keymat_v2.keymat, TRUE);
|
|
||||||
fail_if(!aead, "AEAD is NULL");
|
|
||||||
|
|
||||||
/* single KE during IKE_INTERMEDIATE on the same keymat with same nonces */
|
|
||||||
pseudo_random_function_t prf;
|
|
||||||
chunk_t skd;
|
|
||||||
prf = keymat->keymat_v2.get_skd(&keymat->keymat_v2, &skd);
|
|
||||||
fail_if(prf != PRF_HMAC_SHA2_512, "PRF incorrect");
|
|
||||||
|
|
||||||
ke = tkm_key_exchange_create(MODP_4096_BIT);
|
|
||||||
fail_if(!ke, "Unable to create second KE");
|
|
||||||
ck_assert(ke->ke.get_public_key(&ke->ke, &pubvalue));
|
|
||||||
ck_assert(ke->ke.set_public_key(&ke->ke, pubvalue));
|
|
||||||
chunk_free(&pubvalue);
|
|
||||||
|
|
||||||
kes = NULL;
|
|
||||||
array_insert_create(&kes, ARRAY_TAIL, ke);
|
|
||||||
fail_unless(keymat->keymat_v2.derive_ike_keys(&keymat->keymat_v2, proposal,
|
|
||||||
kes, nonce, nonce, ike_sa_id, prf, skd),
|
|
||||||
"Second key derivation failed");
|
|
||||||
array_destroy(kes);
|
|
||||||
ke->ke.destroy(&ke->ke);
|
|
||||||
chunk_free(&nonce);
|
|
||||||
|
|
||||||
aead = keymat->keymat_v2.keymat.get_aead(&keymat->keymat_v2.keymat, TRUE);
|
|
||||||
fail_if(!aead, "AEAD is NULL");
|
|
||||||
ng->nonce_gen.destroy(&ng->nonce_gen);
|
|
||||||
ike_sa_id->destroy(ike_sa_id);
|
|
||||||
|
|
||||||
/* rekeying uses a new keymat/SA/nonce and multiple KEs */
|
|
||||||
ike_sa_id = ike_sa_id_create(IKEV2_MAJOR_VERSION,
|
|
||||||
34912312312, 612313122, TRUE);
|
|
||||||
fail_if(!ike_sa_id, "Unable to create IKE SA ID");
|
|
||||||
|
|
||||||
tkm_keymat_t *keymat2 = tkm_keymat_create(TRUE);
|
|
||||||
fail_if(!keymat2, "Unable to create keymat");
|
|
||||||
fail_if(!keymat2->get_isa_id(keymat2), "Invalid ISA context id (0)");
|
|
||||||
|
|
||||||
ng = tkm_nonceg_create();
|
|
||||||
fail_if(!ng, "Unable to create nonce generator");
|
|
||||||
fail_unless(ng->nonce_gen.allocate_nonce(&ng->nonce_gen, 32, &nonce),
|
|
||||||
"Unable to allocate nonce");
|
|
||||||
|
|
||||||
tkm_key_exchange_t *ke1 = tkm_key_exchange_create(MODP_3072_BIT);
|
|
||||||
fail_if(!ke1, "Unable to create first KE");
|
|
||||||
ck_assert(ke1->ke.get_public_key(&ke1->ke, &pubvalue));
|
|
||||||
ck_assert(ke1->ke.set_public_key(&ke1->ke, pubvalue));
|
|
||||||
chunk_free(&pubvalue);
|
|
||||||
tkm_key_exchange_t *ke2 = tkm_key_exchange_create(MODP_4096_BIT);
|
|
||||||
fail_if(!ke2, "Unable to create second KE");
|
|
||||||
ck_assert(ke2->ke.get_public_key(&ke2->ke, &pubvalue));
|
|
||||||
ck_assert(ke2->ke.set_public_key(&ke2->ke, pubvalue));
|
|
||||||
chunk_free(&pubvalue);
|
|
||||||
|
|
||||||
prf = keymat->keymat_v2.get_skd(&keymat->keymat_v2, &skd);
|
|
||||||
fail_if(prf != PRF_HMAC_SHA2_512, "PRF incorrect");
|
|
||||||
|
|
||||||
kes = NULL;
|
|
||||||
array_insert_create(&kes, ARRAY_TAIL, ke1);
|
|
||||||
array_insert_create(&kes, ARRAY_TAIL, ke2);
|
|
||||||
fail_unless(keymat2->keymat_v2.derive_ike_keys(&keymat2->keymat_v2, proposal,
|
|
||||||
kes, nonce, nonce, ike_sa_id, prf, skd),
|
|
||||||
"Rekey key derivation failed");
|
|
||||||
array_destroy(kes);
|
|
||||||
ke1->ke.destroy(&ke1->ke);
|
|
||||||
ke2->ke.destroy(&ke2->ke);
|
|
||||||
chunk_free(&nonce);
|
|
||||||
|
|
||||||
aead = keymat2->keymat_v2.keymat.get_aead(&keymat2->keymat_v2.keymat, TRUE);
|
|
||||||
fail_if(!aead, "AEAD is NULL");
|
|
||||||
|
|
||||||
ng->nonce_gen.destroy(&ng->nonce_gen);
|
|
||||||
proposal->destroy(proposal);
|
|
||||||
ike_sa_id->destroy(ike_sa_id);
|
|
||||||
keymat->keymat_v2.keymat.destroy(&keymat->keymat_v2.keymat);
|
|
||||||
keymat2->keymat_v2.keymat.destroy(&keymat2->keymat_v2.keymat);
|
|
||||||
}
|
|
||||||
END_TEST
|
|
||||||
|
|
||||||
START_TEST(test_derive_child_keys)
|
START_TEST(test_derive_child_keys)
|
||||||
{
|
{
|
||||||
tkm_key_exchange_t *ke = tkm_key_exchange_create(MODP_4096_BIT);
|
tkm_diffie_hellman_t *dh = tkm_diffie_hellman_create(MODP_4096_BIT);
|
||||||
fail_if(!ke, "Unable to create DH object");
|
fail_if(!dh, "Unable to create DH object");
|
||||||
proposal_t *proposal = proposal_create_from_string(PROTO_ESP,
|
proposal_t *proposal = proposal_create_from_string(PROTO_ESP,
|
||||||
"aes256-sha512-modp4096");
|
"aes256-sha512-modp4096");
|
||||||
fail_if(!proposal, "Unable to create proposal");
|
fail_if(!proposal, "Unable to create proposal");
|
||||||
@ -208,33 +90,28 @@ START_TEST(test_derive_child_keys)
|
|||||||
fail_if(!keymat, "Unable to create keymat");
|
fail_if(!keymat, "Unable to create keymat");
|
||||||
|
|
||||||
chunk_t encr_i, encr_r, integ_i, integ_r;
|
chunk_t encr_i, encr_r, integ_i, integ_r;
|
||||||
chunk_t nonce_i = chunk_from_chars("test chunk 1"),
|
chunk_t nonce = chunk_from_chars("test chunk");
|
||||||
nonce_r = chunk_from_chars("test chunk 2");
|
|
||||||
|
|
||||||
array_t *kes = NULL;
|
|
||||||
array_insert_create(&kes, ARRAY_TAIL, ke);
|
|
||||||
fail_unless(keymat->keymat_v2.derive_child_keys(&keymat->keymat_v2, proposal,
|
fail_unless(keymat->keymat_v2.derive_child_keys(&keymat->keymat_v2, proposal,
|
||||||
kes, nonce_i, nonce_r, &encr_i,
|
&dh->ke,
|
||||||
|
nonce, nonce, &encr_i,
|
||||||
&integ_i, &encr_r, &integ_r),
|
&integ_i, &encr_r, &integ_r),
|
||||||
"Child key derivation failed");
|
"Child key derivation failed");
|
||||||
array_destroy(kes);
|
|
||||||
|
|
||||||
esa_info_t *info = (esa_info_t *)encr_i.ptr;
|
esa_info_t *info = (esa_info_t *)encr_i.ptr;
|
||||||
fail_if(!info, "encr_i does not contain esa information");
|
fail_if(!info, "encr_i does not contain esa information");
|
||||||
fail_if(info->isa_id != keymat->get_isa_id(keymat),
|
fail_if(info->isa_id != keymat->get_isa_id(keymat),
|
||||||
"Isa context id mismatch (encr_i)");
|
"Isa context id mismatch (encr_i)");
|
||||||
fail_if(info->spi_l != 42,
|
fail_if(info->spi_r != 42,
|
||||||
"SPI mismatch (encr_i)");
|
"SPI mismatch (encr_i)");
|
||||||
fail_unless(chunk_equals(info->nonce_i, nonce_i),
|
fail_unless(chunk_equals(info->nonce_i, nonce),
|
||||||
"nonce_i mismatch (encr_i)");
|
"nonce_i mismatch (encr_i)");
|
||||||
fail_unless(chunk_equals(info->nonce_r, nonce_r),
|
fail_unless(chunk_equals(info->nonce_r, nonce),
|
||||||
"nonce_r mismatch (encr_i)");
|
"nonce_r mismatch (encr_i)");
|
||||||
fail_if(info->is_encr_r,
|
fail_if(info->is_encr_r,
|
||||||
"Flag is_encr_r set for encr_i");
|
"Flag is_encr_r set for encr_i");
|
||||||
fail_if(info->ke_ids.size != 1,
|
fail_if(info->dh_id != dh->get_id(dh),
|
||||||
"KE context number mismatch (encr_i)");
|
"DH context id mismatch (encr_i)");
|
||||||
fail_if(info->ke_ids.data[0] != ke->get_id(ke),
|
|
||||||
"KE context id mismatch (encr_i)");
|
|
||||||
chunk_free(&info->nonce_i);
|
chunk_free(&info->nonce_i);
|
||||||
chunk_free(&info->nonce_r);
|
chunk_free(&info->nonce_r);
|
||||||
|
|
||||||
@ -242,101 +119,21 @@ START_TEST(test_derive_child_keys)
|
|||||||
fail_if(!info, "encr_r does not contain esa information");
|
fail_if(!info, "encr_r does not contain esa information");
|
||||||
fail_if(info->isa_id != keymat->get_isa_id(keymat),
|
fail_if(info->isa_id != keymat->get_isa_id(keymat),
|
||||||
"Isa context id mismatch (encr_r)");
|
"Isa context id mismatch (encr_r)");
|
||||||
fail_if(info->spi_l != 42,
|
fail_if(info->spi_r != 42,
|
||||||
"SPI mismatch (encr_r)");
|
"SPI mismatch (encr_r)");
|
||||||
fail_unless(chunk_equals(info->nonce_i, nonce_i),
|
fail_unless(chunk_equals(info->nonce_i, nonce),
|
||||||
"nonce_i mismatch (encr_r)");
|
"nonce_i mismatch (encr_r)");
|
||||||
fail_unless(chunk_equals(info->nonce_r, nonce_r),
|
fail_unless(chunk_equals(info->nonce_r, nonce),
|
||||||
"nonce_r mismatch (encr_r)");
|
"nonce_r mismatch (encr_r)");
|
||||||
fail_unless(info->is_encr_r,
|
fail_unless(info->is_encr_r,
|
||||||
"Flag is_encr_r set for encr_r");
|
"Flag is_encr_r set for encr_r");
|
||||||
fail_if(info->ke_ids.size != 1,
|
fail_if(info->dh_id != dh->get_id(dh),
|
||||||
"KE context number mismatch (encr_i)");
|
"DH context id mismatch (encr_i)");
|
||||||
fail_if(info->ke_ids.data[0] != ke->get_id(ke),
|
|
||||||
"KE context id mismatch (encr_i)");
|
|
||||||
chunk_free(&info->nonce_i);
|
chunk_free(&info->nonce_i);
|
||||||
chunk_free(&info->nonce_r);
|
chunk_free(&info->nonce_r);
|
||||||
|
|
||||||
proposal->destroy(proposal);
|
proposal->destroy(proposal);
|
||||||
ke->ke.destroy(&ke->ke);
|
dh->ke.destroy(&dh->ke);
|
||||||
keymat->keymat_v2.keymat.destroy(&keymat->keymat_v2.keymat);
|
|
||||||
chunk_free(&encr_i);
|
|
||||||
chunk_free(&encr_r);
|
|
||||||
}
|
|
||||||
END_TEST
|
|
||||||
|
|
||||||
START_TEST(test_derive_child_keys_multi_ke)
|
|
||||||
{
|
|
||||||
tkm_key_exchange_t *ke1 = tkm_key_exchange_create(MODP_3072_BIT);
|
|
||||||
fail_if(!ke1, "Unable to create DH object");
|
|
||||||
tkm_key_exchange_t *ke2 = tkm_key_exchange_create(MODP_4096_BIT);
|
|
||||||
fail_if(!ke2, "Unable to create DH object");
|
|
||||||
proposal_t *proposal = proposal_create_from_string(PROTO_ESP,
|
|
||||||
"aes256-sha512-modp4096");
|
|
||||||
fail_if(!proposal, "Unable to create proposal");
|
|
||||||
proposal->set_spi(proposal, 42);
|
|
||||||
|
|
||||||
tkm_keymat_t *keymat = tkm_keymat_create(TRUE);
|
|
||||||
fail_if(!keymat, "Unable to create keymat");
|
|
||||||
|
|
||||||
chunk_t encr_i, encr_r, integ_i, integ_r;
|
|
||||||
chunk_t nonce_i = chunk_from_chars("test chunk 1"),
|
|
||||||
nonce_r = chunk_from_chars("test chunk 2");
|
|
||||||
|
|
||||||
array_t *kes = NULL;
|
|
||||||
array_insert_create(&kes, ARRAY_TAIL, ke1);
|
|
||||||
array_insert_create(&kes, ARRAY_TAIL, ke2);
|
|
||||||
fail_unless(keymat->keymat_v2.derive_child_keys(&keymat->keymat_v2, proposal,
|
|
||||||
kes, nonce_i, nonce_r, &encr_i,
|
|
||||||
&integ_i, &encr_r, &integ_r),
|
|
||||||
"Child key derivation failed");
|
|
||||||
array_destroy(kes);
|
|
||||||
|
|
||||||
esa_info_t *info = (esa_info_t *)encr_i.ptr;
|
|
||||||
fail_if(!info, "encr_i does not contain esa information");
|
|
||||||
fail_if(info->isa_id != keymat->get_isa_id(keymat),
|
|
||||||
"Isa context id mismatch (encr_i)");
|
|
||||||
fail_if(info->spi_l != 42,
|
|
||||||
"SPI mismatch (encr_i)");
|
|
||||||
fail_unless(chunk_equals(info->nonce_i, nonce_i),
|
|
||||||
"nonce_i mismatch (encr_i)");
|
|
||||||
fail_unless(chunk_equals(info->nonce_r, nonce_r),
|
|
||||||
"nonce_r mismatch (encr_i)");
|
|
||||||
fail_if(info->is_encr_r,
|
|
||||||
"Flag is_encr_r set for encr_i");
|
|
||||||
fail_if(info->ke_ids.size != 2,
|
|
||||||
"KE context number mismatch (encr_i)");
|
|
||||||
fail_if(info->ke_ids.data[0] != ke1->get_id(ke1),
|
|
||||||
"KE context id mismatch (encr_i)");
|
|
||||||
fail_if(info->ke_ids.data[1] != ke2->get_id(ke2),
|
|
||||||
"KE context id mismatch (encr_i)");
|
|
||||||
chunk_free(&info->nonce_i);
|
|
||||||
chunk_free(&info->nonce_r);
|
|
||||||
|
|
||||||
info = (esa_info_t *)encr_r.ptr;
|
|
||||||
fail_if(!info, "encr_r does not contain esa information");
|
|
||||||
fail_if(info->isa_id != keymat->get_isa_id(keymat),
|
|
||||||
"Isa context id mismatch (encr_r)");
|
|
||||||
fail_if(info->spi_l != 42,
|
|
||||||
"SPI mismatch (encr_r)");
|
|
||||||
fail_unless(chunk_equals(info->nonce_i, nonce_i),
|
|
||||||
"nonce_i mismatch (encr_r)");
|
|
||||||
fail_unless(chunk_equals(info->nonce_r, nonce_r),
|
|
||||||
"nonce_r mismatch (encr_r)");
|
|
||||||
fail_unless(info->is_encr_r,
|
|
||||||
"Flag is_encr_r set for encr_r");
|
|
||||||
fail_if(info->ke_ids.size != 2,
|
|
||||||
"KE context number mismatch (encr_i)");
|
|
||||||
fail_if(info->ke_ids.data[0] != ke1->get_id(ke1),
|
|
||||||
"KE context id mismatch (encr_i)");
|
|
||||||
fail_if(info->ke_ids.data[1] != ke2->get_id(ke2),
|
|
||||||
"KE context id mismatch (encr_i)");
|
|
||||||
chunk_free(&info->nonce_i);
|
|
||||||
chunk_free(&info->nonce_r);
|
|
||||||
|
|
||||||
proposal->destroy(proposal);
|
|
||||||
ke1->ke.destroy(&ke1->ke);
|
|
||||||
ke2->ke.destroy(&ke2->ke);
|
|
||||||
keymat->keymat_v2.keymat.destroy(&keymat->keymat_v2.keymat);
|
keymat->keymat_v2.keymat.destroy(&keymat->keymat_v2.keymat);
|
||||||
chunk_free(&encr_i);
|
chunk_free(&encr_i);
|
||||||
chunk_free(&encr_r);
|
chunk_free(&encr_r);
|
||||||
@ -352,12 +149,10 @@ Suite *make_keymat_tests()
|
|||||||
|
|
||||||
tc = tcase_create("derive IKE keys");
|
tc = tcase_create("derive IKE keys");
|
||||||
tcase_add_test(tc, test_derive_ike_keys);
|
tcase_add_test(tc, test_derive_ike_keys);
|
||||||
tcase_add_test(tc, test_derive_ike_keys_multi_ke);
|
|
||||||
suite_add_tcase(s, tc);
|
suite_add_tcase(s, tc);
|
||||||
|
|
||||||
tc = tcase_create("derive CHILD keys");
|
tc = tcase_create("derive CHILD keys");
|
||||||
tcase_add_test(tc, test_derive_child_keys);
|
tcase_add_test(tc, test_derive_child_keys);
|
||||||
tcase_add_test(tc, test_derive_child_keys_multi_ke);
|
|
||||||
suite_add_tcase(s, tc);
|
suite_add_tcase(s, tc);
|
||||||
|
|
||||||
return s;
|
return s;
|
||||||
|
@@ -23,7 +23,7 @@
 
 #include "tkm.h"
 #include "tkm_nonceg.h"
-#include "tkm_key_exchange.h"
+#include "tkm_diffie_hellman.h"
 #include "tkm_kernel_ipsec.h"
 
 /* declare test suite constructors */
@@ -75,11 +75,11 @@ static bool test_runner_init(bool init)
 lib->plugins->add_static_features(lib->plugins, "tkm-tests", features,
 countof(features), TRUE, NULL, NULL);
 
-lib->settings->set_int(lib->settings, "%s.ke_mapping.%d", 1,
+lib->settings->set_int(lib->settings, "%s.dh_mapping.%d", 1,
 lib->ns, MODP_3072_BIT);
-lib->settings->set_int(lib->settings, "%s.ke_mapping.%d", 2,
+lib->settings->set_int(lib->settings, "%s.dh_mapping.%d", 2,
 lib->ns, MODP_4096_BIT);
-register_ke_mapping();
+register_dh_mapping();
 
 plugin_loader_add_plugindirs(BUILDDIR "/src/libstrongswan/plugins",
 PLUGINS);
@@ -100,7 +100,7 @@ static bool test_runner_init(bool init)
 result = FALSE;
 }
 
-destroy_ke_mapping();
+destroy_dh_mapping();
 libcharon_deinit();
 return result;
 }
@@ -19,6 +19,6 @@ TEST_SUITE(make_id_manager_tests)
 TEST_SUITE(make_chunk_map_tests)
 TEST_SUITE(make_utility_tests)
 TEST_SUITE_DEPEND(make_nonceg_tests, CUSTOM, "tkm")
-TEST_SUITE_DEPEND(make_key_exchange_tests, CUSTOM, "tkm")
+TEST_SUITE_DEPEND(make_diffie_hellman_tests, CUSTOM, "tkm")
 TEST_SUITE_DEPEND(make_keymat_tests, CUSTOM, "tkm")
 TEST_SUITE(make_kernel_sad_tests)
@@ -48,6 +48,11 @@ if USE_RADIUS
 libs += $(DESTDIR)$(ipseclibdir)/libradius.so
 endif
 
+if USE_LIBNTTFFT
+deps += $(top_builddir)/src/libstrongswan/math/libnttfft/libnttfft.la
+libs += $(DESTDIR)$(ipseclibdir)/libnttfft.so
+endif
+
 if USE_LIBPTTLS
 deps += $(top_builddir)/src/libpttls/libpttls.la
 libs += $(DESTDIR)$(ipseclibdir)/libpttls.so
@@ -67,8 +67,6 @@ static void build_checksum(char *path, char *name, char *sname)
 name, fsize, fsum, ssize, ssum);
 }
 
-#if defined(S_PLUGINS) || defined(P_PLUGINS) || \
-defined(T_PLUGINS) || defined(C_PLUGINS)
 /**
 * Build checksums for a set of plugins
 */
@@ -90,7 +88,6 @@ static void build_plugin_checksums(char *plugins)
 }
 enumerator->destroy(enumerator);
 }
-#endif
 
 /**
 * Build checksums for a binary/library found at path
@@ -265,7 +265,7 @@ static peer_cfg_t *load_peer_config(private_config_t *this,
 .cert_policy = CERT_ALWAYS_SEND,
 .unique = UNIQUE_NO,
 .keyingtries = 1,
-.options = OPT_NO_MOBIKE,
+.no_mobike = TRUE,
 };
 
 ike_cfg = load_ike_config(this, settings, config);
@@ -382,28 +382,19 @@ static void load_log_levels(file_logger_t *logger, char *section)
 */
 static void load_logger_options(file_logger_t *logger, char *section)
 {
-file_logger_options_t options;
+char *time_format;
+bool add_ms, ike_name, log_level;
 
-options.time_format = conftest->test->get_str(conftest->test,
+time_format = conftest->test->get_str(conftest->test,
 "log.%s.time_format", NULL, section);
-options.time_precision = file_logger_time_precision_parse(
-conftest->test->get_str(conftest->test,
-"log.%s.time_precision", NULL, section));
-/* handle legacy option */
-if (!options.time_precision &&
-conftest->test->get_bool(conftest->test,
-"log.%s.time_add_ms", FALSE, section))
-{
-options.time_precision = FILE_LOGGER_TIME_PRECISION_MS;
-}
-options.ike_name = conftest->test->get_bool(conftest->test,
+add_ms = conftest->test->get_bool(conftest->test,
+"log.%s.time_add_ms", FALSE, section);
+ike_name = conftest->test->get_bool(conftest->test,
 "log.%s.ike_name", FALSE, section);
-options.log_level = conftest->test->get_bool(conftest->test,
+log_level = conftest->test->get_bool(conftest->test,
 "log.%s.log_level", FALSE, section);
-options.json = conftest->test->get_bool(conftest->test,
-"log.%s.json", FALSE, section);
 
-logger->set_options(logger, &options);
+logger->set_options(logger, time_format, add_ms, ike_name, log_level);
 }
 
 /**
@@ -447,7 +438,6 @@ int main(int argc, char *argv[])
 int sig;
 char *suite_file = "suite.conf", *test_file = NULL, *preload, *plugins;
 file_logger_t *logger;
-file_logger_options_t options = {};
 
 if (!library_init(NULL, "conftest"))
 {
@@ -470,7 +460,7 @@
 lib->credmgr->add_set(lib->credmgr, &conftest->creds->set);
 
 logger = file_logger_create("stdout");
-logger->set_options(logger, &options);
+logger->set_options(logger, NULL, FALSE, FALSE, FALSE);
 logger->open(logger, FALSE, FALSE);
 logger->set_level(logger, DBG_ANY, LEVEL_CTRL);
 charon->bus->add_logger(charon->bus, &logger->logger);
@@ -239,8 +239,8 @@ static bool build_auth(private_pretend_auth_t *this,
 }
 keymat = (keymat_v2_t*)ike_sa->get_keymat(ike_sa);
 if (!keymat->get_auth_octets(keymat, TRUE, this->ike_init, this->nonce,
-chunk_empty, chunk_empty, this->id,
-this->reserved, &octets, NULL))
+chunk_empty, this->id, this->reserved,
+&octets, NULL))
 {
 private->destroy(private);
 return FALSE;
@@ -138,8 +138,7 @@ static bool rebuild_auth(private_rebuild_auth_t *this, ike_sa_t *ike_sa,
 }
 keymat = (keymat_v2_t*)ike_sa->get_keymat(ike_sa);
 if (!keymat->get_auth_octets(keymat, FALSE, this->ike_init, this->nonce,
-chunk_empty, chunk_empty, id, reserved,
-&octets, NULL))
+chunk_empty, id, reserved, &octets, NULL))
 {
 private->destroy(private);
 id->destroy(id);
@@ -7,10 +7,10 @@ android {
 applicationId "org.strongswan.android"
 compileSdk 34
 minSdkVersion 21
-targetSdkVersion 34
+targetSdkVersion 33
 
-versionCode 91
-versionName "2.5.6"
+versionCode 82
+versionName "2.5.1"
 
 externalNativeBuild {
 ndkBuild {
@@ -19,7 +19,7 @@ android {
 }
 }
 
-ndkVersion "27.2.12479018"
+ndkVersion "26.1.10909125"
 
 externalNativeBuild {
 ndkBuild {
@@ -45,10 +45,10 @@ android {
 }
 
 dependencies {
-implementation 'androidx.appcompat:appcompat:1.7.1'
-implementation 'androidx.lifecycle:lifecycle-process:2.9.1'
+implementation 'androidx.appcompat:appcompat:1.6.1'
+implementation 'androidx.lifecycle:lifecycle-process:2.7.0'
 implementation 'androidx.preference:preference:1.2.1'
-implementation 'com.google.android.material:material:1.12.0'
+implementation 'com.google.android.material:material:1.10.0'
 testImplementation 'junit:junit:4.13.2'
 testImplementation 'org.assertj:assertj-core:3.24.2'
 testImplementation 'org.mockito:mockito-core:5.8.0'
@@ -22,11 +22,9 @@
 <uses-permission android:name="android.permission.INTERNET" />
 <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
 <uses-permission android:name="android.permission.FOREGROUND_SERVICE" />
-<uses-permission android:name="android.permission.FOREGROUND_SERVICE_SPECIAL_USE" />
 <uses-permission android:name="android.permission.POST_NOTIFICATIONS"/>
 <uses-permission android:name="android.permission.READ_EXTERNAL_STORAGE" />
 <uses-permission android:name="android.permission.REQUEST_IGNORE_BATTERY_OPTIMIZATIONS" />
-<uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW" />
 <!-- necessary to allow users to select ex-/included apps and EAP-TNC -->
 <uses-permission android:name="android.permission.QUERY_ALL_PACKAGES"
 tools:ignore="QueryAllPackagesPermission" />
@@ -37,7 +35,6 @@
 android:label="@string/app_name"
 android:theme="@style/ApplicationTheme"
 android:networkSecurityConfig="@xml/network_security_config"
-android:enableOnBackInvokedCallback="true"
 android:allowBackup="false" >
 <activity
 android:name=".ui.MainActivity"
@@ -179,14 +176,10 @@
 <service
 android:name=".logic.CharonVpnService"
 android:exported="false"
-android:foregroundServiceType="specialUse"
 android:permission="android.permission.BIND_VPN_SERVICE">
 <intent-filter>
 <action android:name="android.net.VpnService" />
 </intent-filter>
-<property
-android:name="android.app.PROPERTY_SPECIAL_USE_FGS_SUBTYPE"
-android:value="VpnService instance"/>
 </service>
 <service
 android:name=".ui.VpnTileService"
|
@ -1,6 +1,6 @@
|
|||||||
/*
|
/*
|
||||||
* Copyright (C) 2023 Relution GmbH
|
* Copyright (C) 2023 Relution GmbH
|
||||||
* Copyright (C) 2012-2025 Tobias Brunner
|
* Copyright (C) 2012-2024 Tobias Brunner
|
||||||
* Copyright (C) 2012 Giuliano Grassi
|
* Copyright (C) 2012 Giuliano Grassi
|
||||||
* Copyright (C) 2012 Ralf Sager
|
* Copyright (C) 2012 Ralf Sager
|
||||||
*
|
*
|
||||||
@ -67,9 +67,6 @@ public class DatabaseHelper extends SQLiteOpenHelper
|
|||||||
new DbColumn(VpnProfileDataSource.KEY_IKE_PROPOSAL, "TEXT", 15),
|
new DbColumn(VpnProfileDataSource.KEY_IKE_PROPOSAL, "TEXT", 15),
|
||||||
new DbColumn(VpnProfileDataSource.KEY_ESP_PROPOSAL, "TEXT", 15),
|
new DbColumn(VpnProfileDataSource.KEY_ESP_PROPOSAL, "TEXT", 15),
|
||||||
new DbColumn(VpnProfileDataSource.KEY_DNS_SERVERS, "TEXT", 17),
|
new DbColumn(VpnProfileDataSource.KEY_DNS_SERVERS, "TEXT", 17),
|
||||||
new DbColumn(VpnProfileDataSource.KEY_PROXY_HOST, "TEXT", 19),
|
|
||||||
new DbColumn(VpnProfileDataSource.KEY_PROXY_PORT, "INTEGER", 19),
|
|
||||||
new DbColumn(VpnProfileDataSource.KEY_PROXY_EXCLUSIONS, "TEXT", 19),
|
|
||||||
});
|
});
|
||||||
|
|
||||||
public static final DbTable TABLE_TRUSTED_CERTIFICATE = new DbTable(TABLE_NAME_TRUSTED_CERTIFICATE, 18, new DbColumn[]{
|
public static final DbTable TABLE_TRUSTED_CERTIFICATE = new DbTable(TABLE_NAME_TRUSTED_CERTIFICATE, 18, new DbColumn[]{
|
||||||
@ -87,7 +84,7 @@ public class DatabaseHelper extends SQLiteOpenHelper
|
|||||||
new DbColumn(ManagedUserCertificate.KEY_PASSWORD, "TEXT", 18),
|
new DbColumn(ManagedUserCertificate.KEY_PASSWORD, "TEXT", 18),
|
||||||
});
|
});
|
||||||
|
|
||||||
private static final int DATABASE_VERSION = 19;
|
private static final int DATABASE_VERSION = 18;
|
||||||
|
|
||||||
private static final Set<DbTable> TABLES;
|
private static final Set<DbTable> TABLES;
|
||||||
|
|
||||||
|
@ -121,7 +121,6 @@ public class ManagedConfiguration
|
|||||||
return Arrays.asList(bundles);
|
return Arrays.asList(bundles);
|
||||||
}
|
}
|
||||||
|
|
||||||
@SuppressWarnings("deprecation")
|
|
||||||
@NonNull
|
@NonNull
|
||||||
private static List<Bundle> getBundleArrayListCompat(final Bundle bundle, final String key)
|
private static List<Bundle> getBundleArrayListCompat(final Bundle bundle, final String key)
|
||||||
{
|
{
|
||||||
|
@ -68,9 +68,9 @@ public class ManagedVpnProfile extends VpnProfile
|
|||||||
|
|
||||||
setMTU(getInt(bundle, VpnProfileDataSource.KEY_MTU, Constants.MTU_MIN, Constants.MTU_MAX));
|
setMTU(getInt(bundle, VpnProfileDataSource.KEY_MTU, Constants.MTU_MIN, Constants.MTU_MAX));
|
||||||
setNATKeepAlive(getInt(bundle, VpnProfileDataSource.KEY_NAT_KEEPALIVE, Constants.NAT_KEEPALIVE_MIN, Constants.NAT_KEEPALIVE_MAX));
|
setNATKeepAlive(getInt(bundle, VpnProfileDataSource.KEY_NAT_KEEPALIVE, Constants.NAT_KEEPALIVE_MIN, Constants.NAT_KEEPALIVE_MAX));
|
||||||
setIkeProposal(getString(bundle, VpnProfileDataSource.KEY_IKE_PROPOSAL));
|
setIkeProposal(bundle.getString(VpnProfileDataSource.KEY_IKE_PROPOSAL));
|
||||||
setEspProposal(getString(bundle, VpnProfileDataSource.KEY_ESP_PROPOSAL));
|
setEspProposal(bundle.getString(VpnProfileDataSource.KEY_ESP_PROPOSAL));
|
||||||
setDnsServers(getString(bundle, VpnProfileDataSource.KEY_DNS_SERVERS));
|
setDnsServers(bundle.getString(VpnProfileDataSource.KEY_DNS_SERVERS));
|
||||||
flags = addPositiveFlag(flags, bundle, KEY_TRANSPORT_IPV6_FLAG, VpnProfile.FLAGS_IPv6_TRANSPORT);
|
flags = addPositiveFlag(flags, bundle, KEY_TRANSPORT_IPV6_FLAG, VpnProfile.FLAGS_IPv6_TRANSPORT);
|
||||||
|
|
||||||
final Bundle splitTunneling = bundle.getBundle(VpnProfileDataSource.KEY_SPLIT_TUNNELING);
|
final Bundle splitTunneling = bundle.getBundle(VpnProfileDataSource.KEY_SPLIT_TUNNELING);
|
||||||
@ -79,16 +79,8 @@ public class ManagedVpnProfile extends VpnProfile
|
|||||||
splitFlags = addPositiveFlag(splitFlags, splitTunneling, KEY_SPLIT_TUNNELLING_BLOCK_IPV4_FLAG, VpnProfile.SPLIT_TUNNELING_BLOCK_IPV4);
|
splitFlags = addPositiveFlag(splitFlags, splitTunneling, KEY_SPLIT_TUNNELLING_BLOCK_IPV4_FLAG, VpnProfile.SPLIT_TUNNELING_BLOCK_IPV4);
|
||||||
splitFlags = addPositiveFlag(splitFlags, splitTunneling, KEY_SPLIT_TUNNELLING_BLOCK_IPV6_FLAG, VpnProfile.SPLIT_TUNNELING_BLOCK_IPV6);
|
splitFlags = addPositiveFlag(splitFlags, splitTunneling, KEY_SPLIT_TUNNELLING_BLOCK_IPV6_FLAG, VpnProfile.SPLIT_TUNNELING_BLOCK_IPV6);
|
||||||
|
|
||||||
setExcludedSubnets(getString(splitTunneling, VpnProfileDataSource.KEY_EXCLUDED_SUBNETS));
|
setExcludedSubnets(splitTunneling.getString(VpnProfileDataSource.KEY_EXCLUDED_SUBNETS));
|
||||||
setIncludedSubnets(getString(splitTunneling, VpnProfileDataSource.KEY_INCLUDED_SUBNETS));
|
setIncludedSubnets(splitTunneling.getString(VpnProfileDataSource.KEY_INCLUDED_SUBNETS));
|
||||||
}
|
|
||||||
|
|
||||||
final Bundle proxyServer = bundle.getBundle(VpnProfileDataSource.KEY_PROXY_SERVER);
|
|
||||||
if (proxyServer != null)
|
|
||||||
{
|
|
||||||
setProxyHost(getString(proxyServer, VpnProfileDataSource.KEY_PROXY_HOST));
|
|
||||||
setProxyPort(getInt(proxyServer, VpnProfileDataSource.KEY_PROXY_PORT, 1, 65_535));
|
|
||||||
setProxyExclusions(getString(proxyServer, VpnProfileDataSource.KEY_PROXY_EXCLUSIONS));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
setSplitTunneling(splitFlags);
|
setSplitTunneling(splitFlags);
|
||||||
@ -118,7 +110,7 @@ public class ManagedVpnProfile extends VpnProfile
|
|||||||
|
|
||||||
setGateway(remote.getString(VpnProfileDataSource.KEY_GATEWAY));
|
setGateway(remote.getString(VpnProfileDataSource.KEY_GATEWAY));
|
||||||
setPort(getInt(remote, VpnProfileDataSource.KEY_PORT, 1, 65_535));
|
setPort(getInt(remote, VpnProfileDataSource.KEY_PORT, 1, 65_535));
|
||||||
setRemoteId(getString(remote, VpnProfileDataSource.KEY_REMOTE_ID));
|
setRemoteId(remote.getString(VpnProfileDataSource.KEY_REMOTE_ID));
|
||||||
|
|
||||||
final String certificateData = remote.getString(VpnProfileDataSource.KEY_CERTIFICATE);
|
final String certificateData = remote.getString(VpnProfileDataSource.KEY_CERTIFICATE);
|
||||||
if (!TextUtils.isEmpty(certificateData))
|
if (!TextUtils.isEmpty(certificateData))
|
||||||
@ -141,9 +133,8 @@ public class ManagedVpnProfile extends VpnProfile
|
|||||||
return flags;
|
return flags;
|
||||||
}
|
}
|
||||||
|
|
||||||
setLocalId(getString(local, VpnProfileDataSource.KEY_LOCAL_ID));
|
setLocalId(local.getString(VpnProfileDataSource.KEY_LOCAL_ID));
|
||||||
setUsername(getString(local, VpnProfileDataSource.KEY_USERNAME));
|
setUsername(local.getString(VpnProfileDataSource.KEY_USERNAME));
|
||||||
setPassword(getString(local, VpnProfileDataSource.KEY_PASSWORD));
|
|
||||||
|
|
||||||
final String userCertificateData = local.getString(VpnProfileDataSource.KEY_USER_CERTIFICATE);
|
final String userCertificateData = local.getString(VpnProfileDataSource.KEY_USER_CERTIFICATE);
|
||||||
final String userCertificatePassword = local.getString(VpnProfileDataSource.KEY_USER_CERTIFICATE_PASSWORD, "");
|
final String userCertificatePassword = local.getString(VpnProfileDataSource.KEY_USER_CERTIFICATE_PASSWORD, "");
|
||||||
@ -163,12 +154,6 @@ public class ManagedVpnProfile extends VpnProfile
|
|||||||
return value < min || value > max ? null : value;
|
return value < min || value > max ? null : value;
|
||||||
}
|
}
|
||||||
|
|
||||||
private static String getString(final Bundle bundle, final String key)
|
|
||||||
{
|
|
||||||
final String value = bundle.getString(key);
|
|
||||||
return TextUtils.isEmpty(value) ? null : value;
|
|
||||||
}
|
|
||||||
|
|
||||||
private static int addPositiveFlag(int flags, Bundle bundle, String key, int flag)
|
private static int addPositiveFlag(int flags, Bundle bundle, String key, int flag)
|
||||||
{
|
{
|
||||||
if (bundle.getBoolean(key))
|
if (bundle.getBoolean(key))
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
/*
|
/*
|
||||||
* Copyright (C) 2012-2025 Tobias Brunner
|
* Copyright (C) 2012-2019 Tobias Brunner
|
||||||
* Copyright (C) 2012 Giuliano Grassi
|
* Copyright (C) 2012 Giuliano Grassi
|
||||||
* Copyright (C) 2012 Ralf Sager
|
* Copyright (C) 2012 Ralf Sager
|
||||||
*
|
*
|
||||||
@ -42,8 +42,8 @@ public class VpnProfile implements Cloneable
|
|||||||
|
|
||||||
private String mName, mGateway, mUsername, mPassword, mCertificate, mUserCertificate;
|
private String mName, mGateway, mUsername, mPassword, mCertificate, mUserCertificate;
|
||||||
private String mRemoteId, mLocalId, mExcludedSubnets, mIncludedSubnets, mSelectedApps;
|
private String mRemoteId, mLocalId, mExcludedSubnets, mIncludedSubnets, mSelectedApps;
|
||||||
private String mIkeProposal, mEspProposal, mDnsServers, mProxyHost, mProxyExclusions;
|
private String mIkeProposal, mEspProposal, mDnsServers;
|
||||||
private Integer mMTU, mPort, mProxyPort, mSplitTunneling, mNATKeepAlive, mFlags;
|
private Integer mMTU, mPort, mSplitTunneling, mNATKeepAlive, mFlags;
|
||||||
private SelectedAppsHandling mSelectedAppsHandling = SelectedAppsHandling.SELECTED_APPS_DISABLE;
|
private SelectedAppsHandling mSelectedAppsHandling = SelectedAppsHandling.SELECTED_APPS_DISABLE;
|
||||||
private VpnType mVpnType;
|
private VpnType mVpnType;
|
||||||
private UUID mUUID;
|
private UUID mUUID;
|
||||||
@ -313,36 +313,6 @@ public class VpnProfile implements Cloneable
|
|||||||
return mSelectedAppsHandling;
|
return mSelectedAppsHandling;
|
||||||
}
|
}
|
||||||
|
|
||||||
public String getProxyHost()
|
|
||||||
{
|
|
||||||
return mProxyHost;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setProxyHost(String proxy)
|
|
||||||
{
|
|
||||||
this.mProxyHost = proxy;
|
|
||||||
}
|
|
||||||
|
|
||||||
public Integer getProxyPort()
|
|
||||||
{
|
|
||||||
return mProxyPort;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setProxyPort(Integer port)
|
|
||||||
{
|
|
||||||
this.mProxyPort = port;
|
|
||||||
}
|
|
||||||
|
|
||||||
public String getProxyExclusions()
|
|
||||||
{
|
|
||||||
return mProxyExclusions;
|
|
||||||
}
|
|
||||||
|
|
||||||
public void setProxyExclusions(String exclusions)
|
|
||||||
{
|
|
||||||
this.mProxyExclusions = exclusions;
|
|
||||||
}
|
|
||||||
|
|
||||||
public Integer getSplitTunneling()
|
public Integer getSplitTunneling()
|
||||||
{
|
{
|
||||||
return mSplitTunneling;
|
return mSplitTunneling;
|
||||||
|
@ -50,10 +50,6 @@ public interface VpnProfileDataSource
|
|||||||
String KEY_IKE_PROPOSAL = "ike_proposal";
|
String KEY_IKE_PROPOSAL = "ike_proposal";
|
||||||
String KEY_ESP_PROPOSAL = "esp_proposal";
|
String KEY_ESP_PROPOSAL = "esp_proposal";
|
||||||
String KEY_DNS_SERVERS = "dns_servers";
|
String KEY_DNS_SERVERS = "dns_servers";
|
||||||
String KEY_PROXY_SERVER = "proxy_server";
|
|
||||||
String KEY_PROXY_HOST = "proxy_host";
|
|
||||||
String KEY_PROXY_PORT = "proxy_port";
|
|
||||||
String KEY_PROXY_EXCLUSIONS = "proxy_exclusions";
|
|
||||||
String KEY_READ_ONLY = "read_only";
|
String KEY_READ_ONLY = "read_only";
|
||||||
|
|
||||||
/**
|
/**
|
||||||
|
@ -1,5 +1,4 @@
|
|||||||
/*
|
/*
|
||||||
* Copyright (C) 2025 Tobias Brunner
|
|
||||||
* Copyright (C) 2023 Relution GmbH
|
* Copyright (C) 2023 Relution GmbH
|
||||||
*
|
*
|
||||||
* Copyright (C) secunet Security Networks AG
|
* Copyright (C) secunet Security Networks AG
|
||||||
@ -76,14 +75,17 @@ public class VpnProfileManagedDataSource implements VpnProfileDataSource
|
|||||||
@Override
|
@Override
|
||||||
public boolean updateVpnProfile(VpnProfile profile)
|
public boolean updateVpnProfile(VpnProfile profile)
|
||||||
{
|
{
|
||||||
final VpnProfile managedProfile = mManagedConfigurationService.getManagedProfiles().get(profile.getUUID().toString());
|
final VpnProfile existingProfile = getVpnProfile(profile.getUUID());
|
||||||
if (managedProfile == null)
|
if (existingProfile == null)
|
||||||
{
|
{
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
final String password = profile.getPassword();
|
||||||
|
existingProfile.setPassword(password);
|
||||||
|
|
||||||
final SharedPreferences.Editor editor = mSharedPreferences.edit();
|
final SharedPreferences.Editor editor = mSharedPreferences.edit();
|
||||||
editor.putString(profile.getUUID().toString(), profile.getPassword());
|
editor.putString(profile.getUUID().toString(), password);
|
||||||
return editor.commit();
|
return editor.commit();
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -93,28 +95,10 @@ public class VpnProfileManagedDataSource implements VpnProfileDataSource
|
|||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
|
|
||||||
* Clone and prepare the given managed profile before handing it out.
|
|
||||||
* @param managedProfile profile to prepare
|
|
||||||
*/
|
|
||||||
private VpnProfile prepareVpnProfile(VpnProfile managedProfile)
|
|
||||||
{
|
|
||||||
final String password = mSharedPreferences.getString(managedProfile.getUUID().toString(), managedProfile.getPassword());
|
|
||||||
final VpnProfile vpnProfile = managedProfile.clone();
|
|
||||||
vpnProfile.setPassword(password);
|
|
||||||
vpnProfile.setDataSource(this);
|
|
||||||
return vpnProfile;
|
|
||||||
}
|
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public VpnProfile getVpnProfile(UUID uuid)
|
public VpnProfile getVpnProfile(UUID uuid)
|
||||||
{
|
{
|
||||||
final VpnProfile managedProfile = mManagedConfigurationService.getManagedProfiles().get(uuid.toString());
|
return mManagedConfigurationService.getManagedProfiles().get(uuid.toString());
|
||||||
if (managedProfile != null)
|
|
||||||
{
|
|
||||||
return prepareVpnProfile(managedProfile);
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
@ -122,9 +106,12 @@ public class VpnProfileManagedDataSource implements VpnProfileDataSource
|
|||||||
{
|
{
|
||||||
final Map<String, ManagedVpnProfile> managedVpnProfiles = mManagedConfigurationService.getManagedProfiles();
|
final Map<String, ManagedVpnProfile> managedVpnProfiles = mManagedConfigurationService.getManagedProfiles();
|
||||||
final List<VpnProfile> vpnProfiles = new ArrayList<>();
|
final List<VpnProfile> vpnProfiles = new ArrayList<>();
|
||||||
for (final VpnProfile managedProfile : managedVpnProfiles.values())
|
for (final VpnProfile vpnProfile : managedVpnProfiles.values())
|
||||||
{
|
{
|
||||||
vpnProfiles.add(prepareVpnProfile(managedProfile));
|
final String password = mSharedPreferences.getString(vpnProfile.getUUID().toString(), vpnProfile.getPassword());
|
||||||
|
vpnProfile.setPassword(password);
|
||||||
|
vpnProfile.setDataSource(this);
|
||||||
|
vpnProfiles.add(vpnProfile);
|
||||||
}
|
}
|
||||||
return vpnProfiles;
|
return vpnProfiles;
|
||||||
}
|
}
|
||||||
|
@ -151,9 +151,6 @@ public class VpnProfileSqlDataSource implements VpnProfileDataSource
|
|||||||
profile.setIkeProposal(cursor.getString(cursor.getColumnIndexOrThrow(KEY_IKE_PROPOSAL)));
|
profile.setIkeProposal(cursor.getString(cursor.getColumnIndexOrThrow(KEY_IKE_PROPOSAL)));
|
||||||
profile.setEspProposal(cursor.getString(cursor.getColumnIndexOrThrow(KEY_ESP_PROPOSAL)));
|
profile.setEspProposal(cursor.getString(cursor.getColumnIndexOrThrow(KEY_ESP_PROPOSAL)));
|
||||||
profile.setDnsServers(cursor.getString(cursor.getColumnIndexOrThrow(KEY_DNS_SERVERS)));
|
profile.setDnsServers(cursor.getString(cursor.getColumnIndexOrThrow(KEY_DNS_SERVERS)));
|
||||||
profile.setProxyHost(cursor.getString(cursor.getColumnIndexOrThrow(KEY_PROXY_HOST)));
|
|
||||||
profile.setProxyPort(getInt(cursor, cursor.getColumnIndexOrThrow(KEY_PROXY_PORT)));
|
|
||||||
profile.setProxyExclusions(cursor.getString(cursor.getColumnIndexOrThrow(KEY_PROXY_EXCLUSIONS)));
|
|
||||||
return profile;
|
return profile;
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -182,9 +179,6 @@ public class VpnProfileSqlDataSource implements VpnProfileDataSource
|
|||||||
values.put(KEY_IKE_PROPOSAL, profile.getIkeProposal());
|
values.put(KEY_IKE_PROPOSAL, profile.getIkeProposal());
|
||||||
values.put(KEY_ESP_PROPOSAL, profile.getEspProposal());
|
values.put(KEY_ESP_PROPOSAL, profile.getEspProposal());
|
||||||
values.put(KEY_DNS_SERVERS, profile.getDnsServers());
|
values.put(KEY_DNS_SERVERS, profile.getDnsServers());
|
||||||
values.put(KEY_PROXY_HOST, profile.getProxyHost());
|
|
||||||
values.put(KEY_PROXY_PORT, profile.getProxyPort());
|
|
||||||
values.put(KEY_PROXY_EXCLUSIONS, profile.getProxyExclusions());
|
|
||||||
return values;
|
return values;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
/*
|
/*
|
||||||
* Copyright (C) 2012-2025 Tobias Brunner
|
* Copyright (C) 2012-2018 Tobias Brunner
|
||||||
* Copyright (C) 2012 Giuliano Grassi
|
* Copyright (C) 2012 Giuliano Grassi
|
||||||
* Copyright (C) 2012 Ralf Sager
|
* Copyright (C) 2012 Ralf Sager
|
||||||
*
|
*
|
||||||
@ -30,7 +30,6 @@ import android.content.Intent;
|
|||||||
import android.content.ServiceConnection;
|
import android.content.ServiceConnection;
|
||||||
import android.content.SharedPreferences;
|
import android.content.SharedPreferences;
|
||||||
import android.content.pm.PackageManager;
|
import android.content.pm.PackageManager;
|
||||||
import android.net.ProxyInfo;
|
|
||||||
import android.net.VpnService;
|
import android.net.VpnService;
|
||||||
import android.os.Build;
|
import android.os.Build;
|
||||||
import android.os.Bundle;
|
import android.os.Bundle;
|
||||||
@ -73,8 +72,6 @@ import java.security.PrivateKey;
|
|||||||
import java.security.cert.CertificateEncodingException;
|
import java.security.cert.CertificateEncodingException;
|
||||||
import java.security.cert.X509Certificate;
|
import java.security.cert.X509Certificate;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
import java.util.Arrays;
|
|
||||||
import java.util.Collections;
|
|
||||||
import java.util.List;
|
import java.util.List;
|
||||||
import java.util.Locale;
|
import java.util.Locale;
|
||||||
import java.util.SortedSet;
|
import java.util.SortedSet;
|
||||||
@ -402,24 +399,11 @@ public class CharonVpnService extends VpnService implements Runnable, VpnStateSe
|
|||||||
public void run()
|
public void run()
|
||||||
{
|
{
|
||||||
mShowNotification = false;
|
mShowNotification = false;
|
||||||
if (Build.VERSION.SDK_INT < Build.VERSION_CODES.N)
|
stopForeground(true);
|
||||||
{
|
|
||||||
stopForegroundCompat();
|
|
||||||
}
|
|
||||||
else
|
|
||||||
{
|
|
||||||
stopForeground(STOP_FOREGROUND_REMOVE);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
@SuppressWarnings("deprecation")
|
|
||||||
private void stopForegroundCompat()
|
|
||||||
{
|
|
||||||
stopForeground(true);
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Create a notification channel for Android 8+
|
* Create a notification channel for Android 8+
|
||||||
*/
|
*/
|
||||||
@ -1119,7 +1103,6 @@ public class CharonVpnService extends VpnService implements Runnable, VpnStateSe
|
|||||||
private final List<InetAddress> mDnsServers = new ArrayList<>();
|
private final List<InetAddress> mDnsServers = new ArrayList<>();
|
||||||
private int mMtu;
|
private int mMtu;
|
||||||
private boolean mIPv4Seen, mIPv6Seen, mDnsServersConfigured;
|
private boolean mIPv4Seen, mIPv6Seen, mDnsServersConfigured;
|
||||||
private ProxyInfo mProxyServer;
|
|
||||||
|
|
||||||
public BuilderCache(VpnProfile profile)
|
public BuilderCache(VpnProfile profile)
|
||||||
{
|
{
|
||||||
@ -1173,17 +1156,6 @@ public class CharonVpnService extends VpnService implements Runnable, VpnStateSe
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (profile.getProxyHost() != null)
|
|
||||||
{
|
|
||||||
int port = profile.getProxyPort() != null ? profile.getProxyPort() : Constants.PROXY_PORT_DEFAULT;
|
|
||||||
List<String> exclusions = new ArrayList<>();
|
|
||||||
if (profile.getProxyExclusions() != null)
|
|
||||||
{
|
|
||||||
Collections.addAll(exclusions, profile.getProxyExclusions().split("\\s+"));
|
|
||||||
}
|
|
||||||
mProxyServer = ProxyInfo.buildDirectProxy(profile.getProxyHost(), port, exclusions);
|
|
||||||
}
|
|
||||||
|
|
||||||
/* set a default MTU, will be set by the daemon for regular interfaces */
|
/* set a default MTU, will be set by the daemon for regular interfaces */
|
||||||
Integer mtu = profile.getMTU();
|
Integer mtu = profile.getMTU();
|
||||||
mMtu = mtu == null ? Constants.MTU_MAX : mtu;
|
mMtu = mtu == null ? Constants.MTU_MAX : mtu;
|
||||||
@ -1264,7 +1236,7 @@ public class CharonVpnService extends VpnService implements Runnable, VpnStateSe
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public void applyData(Builder builder)
|
public void applyData(VpnService.Builder builder)
|
||||||
{
|
{
|
||||||
for (IPRange address : mAddresses)
|
for (IPRange address : mAddresses)
|
||||||
{
|
{
|
||||||
@ -1390,10 +1362,6 @@ public class CharonVpnService extends VpnService implements Runnable, VpnStateSe
|
|||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q && mProxyServer != null)
|
|
||||||
{
|
|
||||||
builder.setHttpProxy(mProxyServer);
|
|
||||||
}
|
|
||||||
builder.setMtu(mMtu);
|
builder.setMtu(mMtu);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -16,7 +16,6 @@
 
 package org.strongswan.android.logic;
 
-import android.annotation.SuppressLint;
 import android.app.AlarmManager;
 import android.app.PendingIntent;
 import android.content.BroadcastReceiver;
@@ -38,7 +37,6 @@ public class Scheduler extends BroadcastReceiver
 private final AlarmManager mManager;
 private final PriorityQueue<ScheduledJob> mJobs;
 
-@SuppressLint("UnspecifiedRegisterReceiverFlag")
 public Scheduler(Context context)
 {
 mContext = context;
@@ -47,14 +45,7 @@
 
 IntentFilter filter = new IntentFilter();
 filter.addAction(EXECUTE_JOB);
-if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU)
-{
-mContext.registerReceiver(this, filter, Context.RECEIVER_NOT_EXPORTED);
-}
-else
-{
-mContext.registerReceiver(this, filter);
-}
+mContext.registerReceiver(this, filter);
 }
 
 /**
@@ -22,7 +22,6 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 import java.net.HttpURLConnection;
-import java.net.Proxy;
 import java.net.SocketTimeoutException;
 import java.net.URL;
 import java.util.ArrayList;
@@ -56,7 +55,7 @@ public class SimpleFetcher
 }
 future = mExecutor.submit(() -> {
 URL url = new URL(uri);
-HttpURLConnection conn = (HttpURLConnection) url.openConnection(Proxy.NO_PROXY);
+HttpURLConnection conn = (HttpURLConnection) url.openConnection();
 conn.setConnectTimeout(10000);
 conn.setReadTimeout(10000);
 conn.setRequestProperty("Connection", "close");
|
@ -1,6 +1,6 @@
|
|||||||
/*
|
/*
|
||||||
* Copyright (C) 2023 Relution GmbH
|
* Copyright (C) 2023 Relution GmbH
|
||||||
* Copyright (C) 2014-2025 Tobias Brunner
|
* Copyright (C) 2014-2024 Tobias Brunner
|
||||||
*
|
*
|
||||||
* Copyright (C) secunet Security Networks AG
|
* Copyright (C) secunet Security Networks AG
|
||||||
*
|
*
|
||||||
@ -38,10 +38,14 @@ import java.util.concurrent.Executor;
|
|||||||
import java.util.concurrent.ExecutorService;
|
import java.util.concurrent.ExecutorService;
|
||||||
import java.util.concurrent.Executors;
|
import java.util.concurrent.Executors;
|
||||||
|
|
||||||
|
import androidx.annotation.NonNull;
|
||||||
import androidx.core.os.HandlerCompat;
|
import androidx.core.os.HandlerCompat;
|
||||||
|
import androidx.lifecycle.DefaultLifecycleObserver;
|
||||||
|
import androidx.lifecycle.LifecycleOwner;
|
||||||
|
import androidx.lifecycle.ProcessLifecycleOwner;
|
||||||
import androidx.localbroadcastmanager.content.LocalBroadcastManager;
|
import androidx.localbroadcastmanager.content.LocalBroadcastManager;
|
||||||
|
|
||||||
public class StrongSwanApplication extends Application
|
public class StrongSwanApplication extends Application implements DefaultLifecycleObserver
|
||||||
{
|
{
|
||||||
private static final String TAG = StrongSwanApplication.class.getSimpleName();
|
private static final String TAG = StrongSwanApplication.class.getSimpleName();
|
||||||
|
|
||||||
@ -76,7 +80,6 @@ public class StrongSwanApplication extends Application
|
|||||||
public void onCreate()
|
public void onCreate()
|
||||||
{
|
{
|
||||||
super.onCreate();
|
super.onCreate();
|
||||||
|
|
||||||
StrongSwanApplication.mContext = getApplicationContext();
|
StrongSwanApplication.mContext = getApplicationContext();
|
||||||
StrongSwanApplication.mInstance = this;
|
StrongSwanApplication.mInstance = this;
|
||||||
|
|
||||||
@ -89,12 +92,24 @@ public class StrongSwanApplication extends Application
|
|||||||
|
|
||||||
mUserCertificateManager = new ManagedUserCertificateManager(mContext, mManagedConfigurationService, mDatabaseHelper);
|
mUserCertificateManager = new ManagedUserCertificateManager(mContext, mManagedConfigurationService, mDatabaseHelper);
|
||||||
|
|
||||||
|
ProcessLifecycleOwner.get().getLifecycle().addObserver(this);
|
||||||
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void onResume(@NonNull LifecycleOwner owner)
|
||||||
|
{
|
||||||
reloadManagedConfigurationAndNotifyListeners();
|
reloadManagedConfigurationAndNotifyListeners();
|
||||||
|
|
||||||
final IntentFilter restrictionsFilter = new IntentFilter(Intent.ACTION_APPLICATION_RESTRICTIONS_CHANGED);
|
final IntentFilter restrictionsFilter = new IntentFilter(Intent.ACTION_APPLICATION_RESTRICTIONS_CHANGED);
|
||||||
registerReceiver(mRestrictionsReceiver, restrictionsFilter);
|
registerReceiver(mRestrictionsReceiver, restrictionsFilter);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
public void onPause(@NonNull LifecycleOwner owner)
|
||||||
|
{
|
||||||
|
unregisterReceiver(mRestrictionsReceiver);
|
||||||
|
}
|
||||||
|
|
||||||
private void reloadManagedConfigurationAndNotifyListeners()
|
private void reloadManagedConfigurationAndNotifyListeners()
|
||||||
{
|
{
|
||||||
final Set<String> uuids = new HashSet<>(mManagedConfigurationService.getManagedProfiles().keySet());
|
final Set<String> uuids = new HashSet<>(mManagedConfigurationService.getManagedProfiles().keySet());
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
/*
|
/*
|
||||||
* Copyright (C) 2012-2024 Tobias Brunner
|
* Copyright (C) 2012-2015 Tobias Brunner
|
||||||
* Copyright (C) 2012 Giuliano Grassi
|
* Copyright (C) 2012 Giuliano Grassi
|
||||||
* Copyright (C) 2012 Ralf Sager
|
* Copyright (C) 2012 Ralf Sager
|
||||||
*
|
*
|
||||||
@ -20,8 +20,6 @@ package org.strongswan.android.logic;
|
|||||||
|
|
||||||
import android.util.Log;
|
import android.util.Log;
|
||||||
|
|
||||||
import java.beans.PropertyChangeListener;
|
|
||||||
import java.beans.PropertyChangeSupport;
|
|
||||||
import java.security.KeyStore;
|
import java.security.KeyStore;
|
||||||
import java.security.KeyStoreException;
|
import java.security.KeyStoreException;
|
||||||
import java.security.cert.Certificate;
|
import java.security.cert.Certificate;
|
||||||
@ -29,9 +27,10 @@ import java.security.cert.X509Certificate;
|
|||||||
 import java.util.ArrayList;
 import java.util.Enumeration;
 import java.util.Hashtable;
+import java.util.Observable;
 import java.util.concurrent.locks.ReentrantReadWriteLock;
 
-public class TrustedCertificateManager
+public class TrustedCertificateManager extends Observable
 {
 	private static final String TAG = TrustedCertificateManager.class.getSimpleName();
 	private final ReentrantReadWriteLock mLock = new ReentrantReadWriteLock();
@@ -39,7 +38,6 @@ public class TrustedCertificateManager
 	private volatile boolean mReload;
 	private boolean mLoaded;
 	private final ArrayList<KeyStore> mKeyStores = new ArrayList<KeyStore>();
-	private PropertyChangeSupport mObservers = new PropertyChangeSupport(this);
 
 	public enum TrustedCertificateSource
 	{
@@ -100,35 +98,6 @@ public class TrustedCertificateManager
 		return Singleton.mInstance;
 	}
 
-	/**
-	 * Add an observer for changes to the trusted certificate store. There will
-	 * be a "storeChanged" property "change" when anything in the store changed.
-	 *
-	 * @param observer observer to add
-	 */
-	public void addObserver(PropertyChangeListener observer)
-	{
-		mObservers.addPropertyChangeListener(observer);
-	}
-
-	/**
-	 * Remove an observer for changes to the trusted certificate store.
-	 *
-	 * @param observer observer to remove
-	 */
-	public void deleteObserver(PropertyChangeListener observer)
-	{
-		mObservers.removePropertyChangeListener(observer);
-	}
-
-	/**
-	 * Use a fake property with a forced change to notify observers.
-	 */
-	private void notifyObservers()
-	{
-		mObservers.firePropertyChange("storeChanged", false, true);
-	}
-
 	/**
 	 * Invalidates the current load state so that the next call to load()
 	 * will force a reload of the cached CA certificates.
@@ -141,6 +110,7 @@ public class TrustedCertificateManager
 	{
 		Log.d(TAG, "Force reload of cached CA certificates on next load");
 		this.mReload = true;
+		this.setChanged();
 		this.notifyObservers();
 		return this;
 	}
@@ -182,6 +152,7 @@ public class TrustedCertificateManager
 		this.mCACerts = certs;
 		if (!this.mLoaded)
 		{
+			this.setChanged();
 			this.notifyObservers();
 			this.mLoaded = true;
 		}
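Note: in the hunks above, the lines marked "-" notify observers through java.beans.PropertyChangeSupport and a "storeChanged" property, while the lines marked "+" fall back to the older java.util.Observable API (setChanged()/notifyObservers()). As a minimal sketch of how a consumer of the PropertyChangeSupport-based side could look — the listener class, its package imports for the app's own types, and the reload logic are illustrative assumptions, not code from this repository:

import java.beans.PropertyChangeEvent;
import java.beans.PropertyChangeListener;

import org.strongswan.android.logic.TrustedCertificateManager; // package path assumed for illustration

/* Illustrative only: a hypothetical listener reacting to the "storeChanged"
 * property fired by TrustedCertificateManager via PropertyChangeSupport. */
public class CertificateStoreWatcher implements PropertyChangeListener
{
	public void watch(TrustedCertificateManager manager)
	{
		/* addObserver() delegates to PropertyChangeSupport.addPropertyChangeListener(),
		 * as shown in the removed method above */
		manager.addObserver(this);
	}

	@Override
	public void propertyChange(PropertyChangeEvent evt)
	{
		if ("storeChanged".equals(evt.getPropertyName()))
		{
			/* placeholder: refresh whatever view or cache depends on the CA store */
		}
	}
}

Only the "storeChanged" property name and the addObserver(PropertyChangeListener) signature come from the diff; everything else is a sketch.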
@@ -16,7 +16,6 @@
 
 package org.strongswan.android.ui;
 
-import android.os.Build;
 import android.os.Bundle;
 import android.view.LayoutInflater;
 import android.view.View;
@@ -59,14 +58,7 @@ public class RemediationInstructionFragment extends ListFragment
 
 		if (savedInstanceState != null)
 		{
-			if (Build.VERSION.SDK_INT < Build.VERSION_CODES.TIRAMISU)
-			{
-				mInstruction = getInstructionCompat(savedInstanceState);
-			}
-			else
-			{
-				mInstruction = savedInstanceState.getParcelable(ARG_REMEDIATION_INSTRUCTION, RemediationInstruction.class);
-			}
+			mInstruction = savedInstanceState.getParcelable(ARG_REMEDIATION_INSTRUCTION);
 		}
 		/* show dividers only between list items */
 		getListView().setHeaderDividersEnabled(false);
@@ -93,14 +85,7 @@ public class RemediationInstructionFragment
 		Bundle args = getArguments();
 		if (args != null)
 		{
-			if (Build.VERSION.SDK_INT < Build.VERSION_CODES.TIRAMISU)
-			{
-				mInstruction = getInstructionCompat(args);
-			}
-			else
-			{
-				mInstruction = args.getParcelable(ARG_REMEDIATION_INSTRUCTION, RemediationInstruction.class);
-			}
+			mInstruction = args.getParcelable(ARG_REMEDIATION_INSTRUCTION);
 		}
 		updateView(mInstruction);
 	}
@@ -132,10 +117,4 @@ public class RemediationInstructionFragment
 			setListAdapter(null);
 		}
 	}
-
-	@SuppressWarnings("deprecation")
-	private static RemediationInstruction getInstructionCompat(Bundle bundle)
-	{
-		return bundle.getParcelable(ARG_REMEDIATION_INSTRUCTION);
-	}
 }
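For context, the lines removed above implement the usual Android 13 (API 33) migration for Bundle.getParcelable(): the type-safe two-argument overload only exists from TIRAMISU on, so earlier releases must fall back to the deprecated one-argument call. A minimal, generic sketch of that pattern follows; the helper class name BundleCompatUtil and its generic signature are illustrative assumptions, not part of the diff.

import android.os.Build;
import android.os.Bundle;
import android.os.Parcelable;

public final class BundleCompatUtil
{
	/* Hypothetical helper mirroring the removed getInstructionCompat() logic
	 * for an arbitrary Parcelable type. */
	@SuppressWarnings("deprecation")
	public static <T extends Parcelable> T getParcelableCompat(Bundle bundle, String key, Class<T> clazz)
	{
		if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.TIRAMISU)
		{
			/* type-safe overload added in API 33 */
			return bundle.getParcelable(key, clazz);
		}
		/* pre-TIRAMISU fallback, deprecated since API 33 */
		return clazz.cast(bundle.getParcelable(key));
	}
}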
@@ -43,16 +43,7 @@ public class RemediationInstructionsActivity extends AppCompatActivity implement
 		if (frag != null)
 		{ /* two-pane layout, update fragment */
 			Bundle extras = getIntent().getExtras();
-			ArrayList<RemediationInstruction> list = null;
-			if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.TIRAMISU)
-			{
-				list = RemediationInstructionsFragment.getInstructionsCompat(extras);
-			}
-			else
-			{
-				list = extras.getParcelableArrayList(RemediationInstructionsFragment.EXTRA_REMEDIATION_INSTRUCTIONS,
-													 RemediationInstruction.class);
-			}
+			ArrayList<RemediationInstruction> list = extras.getParcelableArrayList(RemediationInstructionsFragment.EXTRA_REMEDIATION_INSTRUCTIONS);
 			frag.updateView(list);
 		}
 		else
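The activity-side hunk above reads the instruction list out of the launching intent's extras under the key RemediationInstructionsFragment.EXTRA_REMEDIATION_INSTRUCTIONS. How that intent is built is not part of this diff; purely as an illustration, a caller could hand the list over roughly like this (the helper class, method name, and omitted imports of the app's own classes are assumptions):

import android.content.Context;
import android.content.Intent;
import java.util.ArrayList;
/* imports of the app's own classes (RemediationInstruction,
 * RemediationInstructionsActivity, RemediationInstructionsFragment) omitted */

public final class RemediationUiHelper
{
	/* Hypothetical helper, not part of the repository. */
	public static void showRemediationInstructions(Context context,
												   ArrayList<RemediationInstruction> instructions)
	{
		Intent intent = new Intent(context, RemediationInstructionsActivity.class);
		intent.putParcelableArrayListExtra(RemediationInstructionsFragment.EXTRA_REMEDIATION_INSTRUCTIONS,
										   instructions);
		context.startActivity(intent);
	}
}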
@@ -17,7 +17,6 @@
 package org.strongswan.android.ui;
 
 import android.content.Context;
-import android.os.Build;
 import android.os.Bundle;
 import android.view.View;
 import android.widget.ListView;
@@ -56,14 +55,7 @@ public class RemediationInstructionsFragment extends ListFragment
 
 		if (savedInstanceState != null)
 		{
-			if (Build.VERSION.SDK_INT < Build.VERSION_CODES.TIRAMISU)
-			{
-				mInstructions = getInstructionsCompat(savedInstanceState);
-			}
-			else
-			{
-				mInstructions = savedInstanceState.getParcelableArrayList(EXTRA_REMEDIATION_INSTRUCTIONS, RemediationInstruction.class);
-			}
+			mInstructions = savedInstanceState.getParcelableArrayList(EXTRA_REMEDIATION_INSTRUCTIONS);
 			mCurrentPosition = savedInstanceState.getInt(KEY_POSITION);
 		}
 	}
@@ -101,14 +93,7 @@ public class RemediationInstructionsFragment
 		Bundle args = getArguments();
 		if (mInstructions == null && args != null)
 		{
-			if (Build.VERSION.SDK_INT < Build.VERSION_CODES.TIRAMISU)
-			{
-				mInstructions = getInstructionsCompat(args);
-			}
-			else
-			{
-				mInstructions = args.getParcelableArrayList(EXTRA_REMEDIATION_INSTRUCTIONS, RemediationInstruction.class);
-			}
+			mInstructions = args.getParcelableArrayList(EXTRA_REMEDIATION_INSTRUCTIONS);
 		}
 		updateView(mInstructions);
 
@@ -138,10 +123,4 @@ public class RemediationInstructionsFragment
 		mInstructions = instructions;
 		mAdapter.setData(mInstructions);
 	}
-
-	@SuppressWarnings("deprecation")
-	public static ArrayList<RemediationInstruction> getInstructionsCompat(Bundle bundle)
-	{
-		return bundle.getParcelableArrayList(RemediationInstructionsFragment.EXTRA_REMEDIATION_INSTRUCTIONS);
-	}
 }