diff --git a/.codespellrc b/.codespellrc new file mode 100644 index 000000000..20b9b2ed7 --- /dev/null +++ b/.codespellrc @@ -0,0 +1,5 @@ +[codespell] +skip = .git,*.pdf,*.svg,.codespellrc +check-hidden = true +ignore-regex = \bhenrik@gassmann.onl\b +ignore-words-list = te diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index 9cc708368..33608e6bf 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -27,7 +27,6 @@ RUN apt-get update && export DEBIAN_FRONTEND=noninteractive \ && apt-get -y install --no-install-recommends \ libboost-dev \ libsodium-dev \ - libncurses5-dev \ libprotobuf-dev \ protobuf-compiler \ libgflags-dev \ diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index 762900706..3756343bc 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -6,7 +6,7 @@ "dockerfile": "Dockerfile", // Update 'VARIANT' to pick an Debian / Ubuntu OS version: debian-11, debian-10, debian-9, ubuntu-21.04, ubuntu-20.04, ubuntu-18.04 // Use Debian 11, Debian 9, Ubuntu 18.04 or Ubuntu 21.04 on local arm64/Apple Silicon - "args": { "VARIANT": "ubuntu-21.04" } + "args": { "VARIANT": "debian-11" } }, "runArgs": ["--cap-add=SYS_PTRACE", "--security-opt", "seccomp=unconfined"], diff --git a/.github/workflows/clang-format-check.yml b/.github/workflows/clang-format-check.yml index a393f52d2..0f41d3856 100644 --- a/.github/workflows/clang-format-check.yml +++ b/.github/workflows/clang-format-check.yml @@ -1,5 +1,5 @@ name: clang-format Check -on: [push, pull_request] +on: [pull_request] jobs: formatting-check: name: Formatting Check @@ -11,10 +11,10 @@ jobs: - 'test' - 'proto' steps: - - uses: actions/checkout@v3 + - uses: actions/checkout@v4 - name: Run clang-format style check for C/C++/Protobuf source code. 
- uses: jidicula/clang-format-action@v4.9.0 + uses: jidicula/clang-format-action@v4.11.0 with: - clang-format-version: '13' + clang-format-version: '18' check-path: ${{ matrix.path }} fallback-style: 'Google' diff --git a/.github/workflows/codecov.yml b/.github/workflows/codecov.yml new file mode 100644 index 000000000..caf1f0f54 --- /dev/null +++ b/.github/workflows/codecov.yml @@ -0,0 +1,57 @@ +name: Linux CI + +on: + push: + branches: + - master + pull_request: + +jobs: + codecov: + runs-on: ubuntu-22.04 + + steps: + - uses: actions/checkout@v4 + - name: Setup + shell: bash + run: | + mkdir -p ~/.ssh/ + echo -e "Host github.com\n\tStrictHostKeyChecking no\n" >> ~/.ssh/config + sudo apt-get update + sudo DEBIAN_FRONTEND=noninteractive ACCEPT_EULA=Y apt-get install -y curl zip unzip tar libssl-dev libcurl4-openssl-dev libunwind-dev git cmake ninja-build gdb protobuf-compiler libsodium-dev libgflags-dev libprotobuf-dev libutempter-dev g++ lcov + if [[ -z "${ACT}" ]]; then auth_header="$(git config --local --get http.https://github.com/.extraheader)"; fi + git submodule sync --recursive + git submodule update --init --force --recursive + + # Restore both vcpkg and its artifacts from the GitHub cache service. + - name: Restore vcpkg and its artifacts. + uses: actions/cache@v4 + with: + # The first path is where vcpkg generates artifacts while consuming the vcpkg.json manifest file. + # The second path is the location of vcpkg (it contains the vcpkg executable and data files). + # The other paths starting with '!' are exclusions: they contain temporary files generated during the build of the installed packages. + path: | + ${{ env.CMAKE_BUILD_DIR }}/vcpkg_installed/ + ${{ env.VCPKG_ROOT }} + !${{ env.VCPKG_ROOT }}/buildtrees + !${{ env.VCPKG_ROOT }}/packages + !${{ env.VCPKG_ROOT }}/downloads + # The key is composed in a way that it gets properly invalidated: this must happen whenever vcpkg's Git commit id changes, or the list of packages changes. 
In this case a cache miss must happen and a new entry with a new key will be pushed to the GitHub cache service. + # The key includes: hash of the vcpkg.json file, the hash of the vcpkg Git commit id, and the used vcpkg's triplet. The vcpkg's commit id would suffice, but computing a hash out of it does not harm. + # Note: given a key, the cache content is immutable. If a cache entry has been created improperly, in order to recreate the right content the key must be changed as well, and it must be brand new (i.e. not existing already). + key: | + et-vcpkg-${{ hashFiles( 'vcpkg.json' ) }}-${{ hashFiles( '.git/modules/external/vcpkg/HEAD' )}}-linux-codecov + + - name: Build with code coverage + run: | + mkdir build + pushd build + cmake -DCODE_COVERAGE=ON ../ + make -j`nproc` + ./et-test + lcov --capture --directory . --output-file coverage.info + lcov --remove coverage.info '/usr/*' --output-file coverage.info # filter system-files + lcov --list coverage.info # debug info + # Uploading report to CodeCov + bash <(curl -s https://codecov.io/bash) -f coverage.info || echo "Codecov did not collect coverage reports" + popd diff --git a/.github/workflows/codespell.yml b/.github/workflows/codespell.yml new file mode 100644 index 000000000..c5e16043e --- /dev/null +++ b/.github/workflows/codespell.yml @@ -0,0 +1,22 @@ +--- +name: Codespell + +on: + push: + branches: [master] + pull_request: + branches: [master] + +permissions: + contents: read + +jobs: + codespell: + name: Check for spelling errors + runs-on: ubuntu-latest + + steps: + - name: Checkout + uses: actions/checkout@v4 + - name: Codespell + uses: codespell-project/actions-codespell@v2 diff --git a/.github/workflows/linux_ci.yml b/.github/workflows/linux_ci.yml index bc6bcdf64..74687483d 100644 --- a/.github/workflows/linux_ci.yml +++ b/.github/workflows/linux_ci.yml @@ -7,11 +7,14 @@ on: pull_request: jobs: - ubsan_linux: - runs-on: ubuntu-20.04 - + linux_ci: + runs-on: ubuntu-22.04 + strategy: + fail-fast: true + 
matrix: + sanitize: [ubsan, asan, tsan, msan] steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Setup shell: bash run: | @@ -19,62 +22,27 @@ jobs: echo -e "Host github.com\n\tStrictHostKeyChecking no\n" >> ~/.ssh/config sudo apt-get update sudo DEBIAN_FRONTEND=noninteractive ACCEPT_EULA=Y apt-get install -y curl zip unzip tar libssl-dev libcurl4-openssl-dev libunwind-dev git cmake ninja-build gdb protobuf-compiler libsodium-dev libgflags-dev libprotobuf-dev libutempter-dev g++ - auth_header="$(git config --local --get http.https://github.com/.extraheader)" - git submodule sync --recursive - git submodule update --init --force --recursive - # Restore both vcpkg and its artifacts from the GitHub cache service. - - name: Restore vcpkg and its artifacts. - uses: actions/cache@v2 - with: - # The first path is where vcpkg generates artifacts while consuming the vcpkg.json manifest file. - # The second path is the location of vcpkg (it contains the vcpkg executable and data files). - # The other paths starting with '!' are exclusions: they contain termporary files generated during the build of the installed packages. - path: | - ${{ env.CMAKE_BUILD_DIR }}/vcpkg_installed/ - ${{ env.VCPKG_ROOT }} - !${{ env.VCPKG_ROOT }}/buildtrees - !${{ env.VCPKG_ROOT }}/packages - !${{ env.VCPKG_ROOT }}/downloads - # The key is composed in a way that it gets properly invalidated: this must happen whenever vcpkg's Git commit id changes, or the list of packages changes. In this case a cache miss must happen and a new entry with a new key with be pushed to GitHub the cache service. - # The key includes: hash of the vcpkg.json file, the hash of the vcpkg Git commit id, and the used vcpkg's triplet. The vcpkg's commit id would suffice, but computing an hash out it does not harm. - # Note: given a key, the cache content is immutable. 
If a cache entry has been created improperly, in order the recreate the right content the key must be changed as well, and it must be brand new (i.e. not existing already). - key: | - et-vcpkg-${{ hashFiles( 'vcpkg.json' ) }}-${{ hashFiles( '.git/modules/external/vcpkg/HEAD' )}}-linux + echo "Host localhost\n Port 2222\n\n" >> ~/.ssh/config - - name: Test with ubsan - run: | - mkdir build - pushd build - cmake -DSANITIZE_UNDEFINED=ON ../ - make -j`nproc` - TSAN_OPTIONS="suppressions=../test/test_tsan.suppression" ./et-test - popd - rm -Rf build + sudo /usr/sbin/sshd -p 2222 - asan_linux: - runs-on: ubuntu-20.04 + ssh-keygen -t rsa -f ~/.ssh/id_rsa -P "" -N "" + cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys + cat ~/.ssh/id_rsa.pub >> ~/.ssh/known_hosts + ssh -vvvvvvv -o "StrictHostKeyChecking no" -o 'PreferredAuthentications=publickey' localhost "echo foobar" # Fails if we can't ssh into localhost without a password + if [[ -z "${ACT}" ]]; then auth_header="$(git config --local --get http.https://github.com/.extraheader)"; fi - steps: - - uses: actions/checkout@v2 - - name: Setup - shell: bash - run: | - mkdir -p ~/.ssh/ - echo -e "Host github.com\n\tStrictHostKeyChecking no\n" >> ~/.ssh/config - sudo apt-get update - sudo DEBIAN_FRONTEND=noninteractive ACCEPT_EULA=Y apt-get install -y curl zip unzip tar libssl-dev libcurl4-openssl-dev libunwind-dev git cmake ninja-build gdb protobuf-compiler libsodium-dev libgflags-dev libprotobuf-dev libutempter-dev g++ - auth_header="$(git config --local --get http.https://github.com/.extraheader)" git submodule sync --recursive git submodule update --init --force --recursive # Restore both vcpkg and its artifacts from the GitHub cache service. - name: Restore vcpkg and its artifacts. - uses: actions/cache@v2 + uses: actions/cache@v4 with: # The first path is where vcpkg generates artifacts while consuming the vcpkg.json manifest file. # The second path is the location of vcpkg (it contains the vcpkg executable and data files). 
- # The other paths starting with '!' are exclusions: they contain termporary files generated during the build of the installed packages. + # The other paths starting with '!' are exclusions: they contain temporary files generated during the build of the installed packages. path: | ${{ env.CMAKE_BUILD_DIR }}/vcpkg_installed/ ${{ env.VCPKG_ROOT }} @@ -85,153 +53,48 @@ jobs: # The key includes: hash of the vcpkg.json file, the hash of the vcpkg Git commit id, and the used vcpkg's triplet. The vcpkg's commit id would suffice, but computing an hash out it does not harm. # Note: given a key, the cache content is immutable. If a cache entry has been created improperly, in order the recreate the right content the key must be changed as well, and it must be brand new (i.e. not existing already). key: | - et-vcpkg-${{ hashFiles( 'vcpkg.json' ) }}-${{ hashFiles( '.git/modules/external/vcpkg/HEAD' )}}-linux + et-vcpkg-${{ hashFiles( 'vcpkg.json' ) }}-${{ hashFiles( '.git/modules/external/vcpkg/HEAD' )}}-linux-${{ matrix.sanitize }} - - name: Test with asan + - name: Build with ubsan run: | mkdir build pushd build - cmake -DSANITIZE_ADDRESS=ON ../ + cmake -DSANITIZE_UNDEFINED=ON ../ make -j`nproc` - TSAN_OPTIONS="suppressions=../test/test_tsan.suppression" ./et-test popd - rm -Rf build - - msan_linux: - runs-on: ubuntu-20.04 - - steps: - - uses: actions/checkout@v2 - - name: Setup - shell: bash - run: | - mkdir -p ~/.ssh/ - echo -e "Host github.com\n\tStrictHostKeyChecking no\n" >> ~/.ssh/config - sudo apt-get update - sudo DEBIAN_FRONTEND=noninteractive ACCEPT_EULA=Y apt-get install -y curl zip unzip tar libssl-dev libcurl4-openssl-dev libunwind-dev git cmake ninja-build gdb protobuf-compiler libsodium-dev libgflags-dev libprotobuf-dev libutempter-dev g++ - auth_header="$(git config --local --get http.https://github.com/.extraheader)" - git submodule sync --recursive - git submodule update --init --force --recursive - - # Restore both vcpkg and its artifacts from the GitHub 
cache service. - - name: Restore vcpkg and its artifacts. - uses: actions/cache@v2 - with: - # The first path is where vcpkg generates artifacts while consuming the vcpkg.json manifest file. - # The second path is the location of vcpkg (it contains the vcpkg executable and data files). - # The other paths starting with '!' are exclusions: they contain termporary files generated during the build of the installed packages. - path: | - ${{ env.CMAKE_BUILD_DIR }}/vcpkg_installed/ - ${{ env.VCPKG_ROOT }} - !${{ env.VCPKG_ROOT }}/buildtrees - !${{ env.VCPKG_ROOT }}/packages - !${{ env.VCPKG_ROOT }}/downloads - # The key is composed in a way that it gets properly invalidated: this must happen whenever vcpkg's Git commit id changes, or the list of packages changes. In this case a cache miss must happen and a new entry with a new key with be pushed to GitHub the cache service. - # The key includes: hash of the vcpkg.json file, the hash of the vcpkg Git commit id, and the used vcpkg's triplet. The vcpkg's commit id would suffice, but computing an hash out it does not harm. - # Note: given a key, the cache content is immutable. If a cache entry has been created improperly, in order the recreate the right content the key must be changed as well, and it must be brand new (i.e. not existing already). 
- key: | - et-vcpkg-${{ hashFiles( 'vcpkg.json' ) }}-${{ hashFiles( '.git/modules/external/vcpkg/HEAD' )}}-linux + ./test/system_tests/connect_with_jumphost.sh + TSAN_OPTIONS="suppressions=../test/test_tsan.suppression" ./build/et-test + if: matrix.sanitize == 'ubsan' - - name: Test with msan + - name: Build with asan run: | mkdir build pushd build - cmake -DSANITIZE_MEMORY=ON ../ + cmake -DSANITIZE_ADDRESS=ON ../ make -j`nproc` - TSAN_OPTIONS="suppressions=../test/test_tsan.suppression" ./et-test popd - rm -Rf build - - tsan_linux: - runs-on: ubuntu-20.04 - - steps: - - uses: actions/checkout@v2 - - name: Setup - shell: bash - run: | - mkdir -p ~/.ssh/ - echo -e "Host github.com\n\tStrictHostKeyChecking no\n" >> ~/.ssh/config - sudo apt-get update - sudo DEBIAN_FRONTEND=noninteractive ACCEPT_EULA=Y apt-get install -y curl zip unzip tar libssl-dev libcurl4-openssl-dev libunwind-dev git cmake ninja-build gdb protobuf-compiler libsodium-dev libgflags-dev libprotobuf-dev libutempter-dev g++ - auth_header="$(git config --local --get http.https://github.com/.extraheader)" - git submodule sync --recursive - git submodule update --init --force --recursive - - # Restore both vcpkg and its artifacts from the GitHub cache service. - - name: Restore vcpkg and its artifacts. - uses: actions/cache@v2 - with: - # The first path is where vcpkg generates artifacts while consuming the vcpkg.json manifest file. - # The second path is the location of vcpkg (it contains the vcpkg executable and data files). - # The other paths starting with '!' are exclusions: they contain termporary files generated during the build of the installed packages. - path: | - ${{ env.CMAKE_BUILD_DIR }}/vcpkg_installed/ - ${{ env.VCPKG_ROOT }} - !${{ env.VCPKG_ROOT }}/buildtrees - !${{ env.VCPKG_ROOT }}/packages - !${{ env.VCPKG_ROOT }}/downloads - # The key is composed in a way that it gets properly invalidated: this must happen whenever vcpkg's Git commit id changes, or the list of packages changes. 
In this case a cache miss must happen and a new entry with a new key with be pushed to GitHub the cache service. - # The key includes: hash of the vcpkg.json file, the hash of the vcpkg Git commit id, and the used vcpkg's triplet. The vcpkg's commit id would suffice, but computing an hash out it does not harm. - # Note: given a key, the cache content is immutable. If a cache entry has been created improperly, in order the recreate the right content the key must be changed as well, and it must be brand new (i.e. not existing already). - key: | - et-vcpkg-${{ hashFiles( 'vcpkg.json' ) }}-${{ hashFiles( '.git/modules/external/vcpkg/HEAD' )}}-linux + ./test/system_tests/connect_with_jumphost.sh + TSAN_OPTIONS="suppressions=../test/test_tsan.suppression" ./build/et-test + if: matrix.sanitize == 'asan' - - name: Test with tsan + - name: Build with msan run: | mkdir build pushd build - cmake -DSANITIZE_THREAD=ON -DSANITIZE_LINK_STATIC=ON ../ + cmake -DSANITIZE_MEMORY=ON ../ make -j`nproc` - TSAN_OPTIONS="suppressions=../test/test_tsan.suppression" ./et-test popd - rm -Rf build - - codecov: - runs-on: ubuntu-20.04 - - steps: - - uses: actions/checkout@v2 - - name: Setup - shell: bash - run: | - mkdir -p ~/.ssh/ - echo -e "Host github.com\n\tStrictHostKeyChecking no\n" >> ~/.ssh/config - sudo apt-get update - sudo DEBIAN_FRONTEND=noninteractive ACCEPT_EULA=Y apt-get install -y curl zip unzip tar libssl-dev libcurl4-openssl-dev libunwind-dev git cmake ninja-build gdb protobuf-compiler libsodium-dev libgflags-dev libprotobuf-dev libutempter-dev g++ lcov - auth_header="$(git config --local --get http.https://github.com/.extraheader)" - git submodule sync --recursive - git submodule update --init --force --recursive - - # Restore both vcpkg and its artifacts from the GitHub cache service. - - name: Restore vcpkg and its artifacts. - uses: actions/cache@v2 - with: - # The first path is where vcpkg generates artifacts while consuming the vcpkg.json manifest file. 
- # The second path is the location of vcpkg (it contains the vcpkg executable and data files). - # The other paths starting with '!' are exclusions: they contain termporary files generated during the build of the installed packages. - path: | - ${{ env.CMAKE_BUILD_DIR }}/vcpkg_installed/ - ${{ env.VCPKG_ROOT }} - !${{ env.VCPKG_ROOT }}/buildtrees - !${{ env.VCPKG_ROOT }}/packages - !${{ env.VCPKG_ROOT }}/downloads - # The key is composed in a way that it gets properly invalidated: this must happen whenever vcpkg's Git commit id changes, or the list of packages changes. In this case a cache miss must happen and a new entry with a new key with be pushed to GitHub the cache service. - # The key includes: hash of the vcpkg.json file, the hash of the vcpkg Git commit id, and the used vcpkg's triplet. The vcpkg's commit id would suffice, but computing an hash out it does not harm. - # Note: given a key, the cache content is immutable. If a cache entry has been created improperly, in order the recreate the right content the key must be changed as well, and it must be brand new (i.e. not existing already). - key: | - et-vcpkg-${{ hashFiles( 'vcpkg.json' ) }}-${{ hashFiles( '.git/modules/external/vcpkg/HEAD' )}}-linux + ./test/system_tests/connect_with_jumphost.sh + TSAN_OPTIONS="suppressions=../test/test_tsan.suppression" ./build/et-test + if: matrix.sanitize == 'msan' - - name: Test with code coverage + - name: Build with tsan run: | mkdir build pushd build - cmake -DCODE_COVERAGE=ON ../ + cmake -DSANITIZE_THREAD=ON -DSANITIZE_LINK_STATIC=ON ../ make -j`nproc` - ./et-test - lcov --capture --directory . 
--output-file coverage.info - lcov --remove coverage.info '/usr/*' --output-file coverage.info # filter system-files - lcov --list coverage.info # debug info - # Uploading report to CodeCov - bash <(curl -s https://codecov.io/bash) -f coverage.info || echo "Codecov did not collect coverage reports" popd + ./test/system_tests/connect_with_jumphost.sh + TSAN_OPTIONS="suppressions=../test/test_tsan.suppression" ./build/et-test + if: matrix.sanitize == 'tsan' diff --git a/.github/workflows/mac_ci.yml b/.github/workflows/mac_ci.yml index 83b6f8715..7bdf5028f 100644 --- a/.github/workflows/mac_ci.yml +++ b/.github/workflows/mac_ci.yml @@ -11,24 +11,24 @@ jobs: runs-on: macos-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Setup shell: bash run: | mkdir -p ~/.ssh/ echo -e "Host github.com\n\tStrictHostKeyChecking no\n" >> ~/.ssh/config - brew install cmake ninja; brew install openssl protobuf libsodium + brew install cmake ninja; brew install protobuf libsodium auth_header="$(git config --local --get http.https://github.com/.extraheader)" git submodule sync --recursive git submodule update --init --force --recursive # Restore both vcpkg and its artifacts from the GitHub cache service. - name: Restore vcpkg and its artifacts. - uses: actions/cache@v2 + uses: actions/cache@v4 with: # The first path is where vcpkg generates artifacts while consuming the vcpkg.json manifest file. # The second path is the location of vcpkg (it contains the vcpkg executable and data files). - # The other paths starting with '!' are exclusions: they contain termporary files generated during the build of the installed packages. + # The other paths starting with '!' are exclusions: they contain temporary files generated during the build of the installed packages. 
path: | ${{ env.CMAKE_BUILD_DIR }}/vcpkg_installed/ ${{ env.VCPKG_ROOT }} @@ -46,7 +46,7 @@ jobs: run: | mkdir build pushd build - cmake -DSANITIZE_UNDEFINED=ON -DOPENSSL_ROOT_DIR=/usr/local/opt/openssl -DOPENSSL_INCLUDE_DIR=/usr/local/opt/openssl/include -DOPENSSL_LIBRARIES=/usr/local/opt/openssl/lib ../ + cmake -DSANITIZE_UNDEFINED=ON ../ make -j`nproc` TSAN_OPTIONS="suppressions=../test/test_tsan.suppression" ./et-test popd @@ -56,7 +56,7 @@ jobs: runs-on: macos-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Setup shell: bash run: | @@ -65,15 +65,15 @@ jobs: git submodule update --init --force --recursive mkdir -p ~/.ssh/ echo -e "Host github.com\n\tStrictHostKeyChecking no\n" >> ~/.ssh/config - brew install cmake ninja; brew install openssl protobuf libsodium + brew install cmake ninja; brew install protobuf libsodium # Restore both vcpkg and its artifacts from the GitHub cache service. - name: Restore vcpkg and its artifacts. - uses: actions/cache@v2 + uses: actions/cache@v4 with: # The first path is where vcpkg generates artifacts while consuming the vcpkg.json manifest file. # The second path is the location of vcpkg (it contains the vcpkg executable and data files). - # The other paths starting with '!' are exclusions: they contain termporary files generated during the build of the installed packages. + # The other paths starting with '!' are exclusions: they contain temporary files generated during the build of the installed packages. 
path: | ${{ env.CMAKE_BUILD_DIR }}/vcpkg_installed/ ${{ env.VCPKG_ROOT }} @@ -90,7 +90,7 @@ jobs: run: | mkdir build pushd build - cmake -DSANITIZE_ADDRESS=ON -DOPENSSL_ROOT_DIR=/usr/local/opt/openssl -DOPENSSL_INCLUDE_DIR=/usr/local/opt/openssl/include -DOPENSSL_LIBRARIES=/usr/local/opt/openssl/lib ../ + cmake -DSANITIZE_ADDRESS=ON ../ make -j`nproc` TSAN_OPTIONS="suppressions=../test/test_tsan.suppression" ./et-test popd @@ -100,7 +100,7 @@ jobs: runs-on: macos-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Setup shell: bash run: | @@ -109,15 +109,15 @@ jobs: git submodule update --init --force --recursive mkdir -p ~/.ssh/ echo -e "Host github.com\n\tStrictHostKeyChecking no\n" >> ~/.ssh/config - brew install cmake ninja; brew install openssl protobuf libsodium + brew install cmake ninja; brew install protobuf libsodium # Restore both vcpkg and its artifacts from the GitHub cache service. - name: Restore vcpkg and its artifacts. - uses: actions/cache@v2 + uses: actions/cache@v4 with: # The first path is where vcpkg generates artifacts while consuming the vcpkg.json manifest file. # The second path is the location of vcpkg (it contains the vcpkg executable and data files). - # The other paths starting with '!' are exclusions: they contain termporary files generated during the build of the installed packages. + # The other paths starting with '!' are exclusions: they contain temporary files generated during the build of the installed packages. 
path: | ${{ env.CMAKE_BUILD_DIR }}/vcpkg_installed/ ${{ env.VCPKG_ROOT }} @@ -134,7 +134,7 @@ jobs: run: | mkdir build pushd build - cmake -DSANITIZE_MEMORY=ON -DOPENSSL_ROOT_DIR=/usr/local/opt/openssl -DOPENSSL_INCLUDE_DIR=/usr/local/opt/openssl/include -DOPENSSL_LIBRARIES=/usr/local/opt/openssl/lib ../ + cmake -DSANITIZE_MEMORY=ON ../ make -j`nproc` TSAN_OPTIONS="suppressions=../test/test_tsan.suppression" ./et-test popd @@ -144,7 +144,7 @@ jobs: runs-on: macos-latest steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 - name: Setup shell: bash run: | @@ -153,15 +153,15 @@ jobs: git submodule update --init --force --recursive mkdir -p ~/.ssh/ echo -e "Host github.com\n\tStrictHostKeyChecking no\n" >> ~/.ssh/config - brew install cmake ninja; brew install openssl protobuf libsodium + brew install cmake ninja; brew install protobuf libsodium # Restore both vcpkg and its artifacts from the GitHub cache service. - name: Restore vcpkg and its artifacts. - uses: actions/cache@v2 + uses: actions/cache@v4 with: # The first path is where vcpkg generates artifacts while consuming the vcpkg.json manifest file. # The second path is the location of vcpkg (it contains the vcpkg executable and data files). - # The other paths starting with '!' are exclusions: they contain termporary files generated during the build of the installed packages. + # The other paths starting with '!' are exclusions: they contain temporary files generated during the build of the installed packages. 
path: | ${{ env.CMAKE_BUILD_DIR }}/vcpkg_installed/ ${{ env.VCPKG_ROOT }} @@ -178,7 +178,7 @@ jobs: run: | mkdir build pushd build - cmake -DSANITIZE_THREAD=ON -DSANITIZE_LINK_STATIC=ON -DOPENSSL_ROOT_DIR=/usr/local/opt/openssl -DOPENSSL_INCLUDE_DIR=/usr/local/opt/openssl/include -DOPENSSL_LIBRARIES=/usr/local/opt/openssl/lib ../ + cmake -DSANITIZE_THREAD=ON -DSANITIZE_LINK_STATIC=ON ../ make -j`nproc` TSAN_OPTIONS="suppressions=../test/test_tsan.suppression" ./et-test popd diff --git a/.github/workflows/novcpkg_build_master.yml b/.github/workflows/novcpkg_build_master.yml index 3cb47d1f3..42429d641 100644 --- a/.github/workflows/novcpkg_build_master.yml +++ b/.github/workflows/novcpkg_build_master.yml @@ -38,7 +38,6 @@ jobs: sudo apt-get install --no-install-recommends \ libboost-dev \ libsodium-dev \ - libncurses5-dev \ libprotobuf-dev \ protobuf-compiler \ libgflags-dev \ @@ -58,7 +57,7 @@ jobs: run: brew update && brew install ninja cmake pkg-config curl openssl protobuf libsodium if: matrix.os == 'macos-latest' - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: submodules: recursive @@ -90,7 +89,7 @@ jobs: run: | cmake --build "${{ env.CMAKE_BUILD_DIR }}" - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v4 with: - name: et-client-${{matrix.os}} + name: et-client-${{matrix.os}}-gcc${{matrix.gcc}} path: ${{ env.CMAKE_BUILD_DIR }}/et${{matrix.extension}} diff --git a/.github/workflows/novcpkg_build_release.yml b/.github/workflows/novcpkg_build_release.yml index 36915b33b..3abe30e70 100644 --- a/.github/workflows/novcpkg_build_release.yml +++ b/.github/workflows/novcpkg_build_release.yml @@ -4,6 +4,8 @@ on: branches: - release pull_request: + branches: + - release jobs: build_novcpkg: @@ -38,7 +40,6 @@ jobs: sudo apt-get install --no-install-recommends \ libboost-dev \ libsodium-dev \ - libncurses5-dev \ libprotobuf-dev \ protobuf-compiler \ libgflags-dev \ @@ -58,7 +59,7 @@ jobs: run: brew update && brew install ninja cmake 
pkg-config curl openssl protobuf libsodium if: matrix.os == 'macos-latest' - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: submodules: false @@ -90,7 +91,7 @@ jobs: run: | cmake --build "${{ env.CMAKE_BUILD_DIR }}" - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v4 with: - name: et-client-${{matrix.os}} + name: et-client-${{matrix.os}}-gcc${{matrix.gcc}} path: ${{ env.CMAKE_BUILD_DIR }}/et${{matrix.extension}} diff --git a/.github/workflows/vcpkg_build_master.yml b/.github/workflows/vcpkg_build_master.yml index cd3443497..1b5c6ac9b 100644 --- a/.github/workflows/vcpkg_build_master.yml +++ b/.github/workflows/vcpkg_build_master.yml @@ -43,7 +43,6 @@ jobs: sudo apt-get install --no-install-recommends \ libboost-dev \ libsodium-dev \ - libncurses5-dev \ libprotobuf-dev \ protobuf-compiler \ libgflags-dev \ @@ -60,13 +59,13 @@ jobs: libunwind-dev if: matrix.os == 'ubuntu-latest' - name: Install Dependencies (Windows) - run: choco install ninja + run: choco install -y ninja if: matrix.os == 'windows-latest' - name: Install Dependencies (macOS) run: brew update && brew install ninja cmake if: matrix.os == 'macos-latest' - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: submodules: recursive @@ -79,7 +78,7 @@ jobs: # Restore both vcpkg and its artifacts from the GitHub cache service. - name: Restore vcpkg and its artifacts. 
- uses: actions/cache@v2 + uses: actions/cache@v4 with: path: | ${{ env.VCPKG_ROOT }} @@ -122,7 +121,7 @@ jobs: run: | cmake --build "${{ env.CMAKE_BUILD_DIR }}" - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v4 with: - name: et-client-${{matrix.os}} + name: et-client-${{matrix.os}}-gcc${{matrix.gcc}} path: ${{ env.CMAKE_BUILD_DIR }}/et${{matrix.extension}} diff --git a/.github/workflows/vcpkg_build_release.yml b/.github/workflows/vcpkg_build_release.yml index 2e850af52..aad7eabaa 100644 --- a/.github/workflows/vcpkg_build_release.yml +++ b/.github/workflows/vcpkg_build_release.yml @@ -4,6 +4,8 @@ on: branches: - release pull_request: + branches: + - release jobs: build_vcpkg: @@ -43,7 +45,6 @@ jobs: sudo apt-get install --no-install-recommends \ libboost-dev \ libsodium-dev \ - libncurses5-dev \ libprotobuf-dev \ protobuf-compiler \ libgflags-dev \ @@ -60,13 +61,13 @@ jobs: libunwind-dev if: matrix.os == 'ubuntu-latest' - name: Install Dependencies (Windows) - run: choco install ninja + run: choco install -y ninja if: matrix.os == 'windows-latest' - name: Install Dependencies (macOS) run: brew update && brew install ninja cmake if: matrix.os == 'macos-latest' - - uses: actions/checkout@v2 + - uses: actions/checkout@v4 with: submodules: false @@ -79,7 +80,7 @@ jobs: # Restore both vcpkg and its artifacts from the GitHub cache service. - name: Restore vcpkg and its artifacts. 
- uses: actions/cache@v2 + uses: actions/cache@v4 with: path: | ${{ env.VCPKG_ROOT }} @@ -122,7 +123,7 @@ jobs: run: | cmake --build "${{ env.CMAKE_BUILD_DIR }}" - - uses: actions/upload-artifact@v2 + - uses: actions/upload-artifact@v4 with: - name: et-client-${{matrix.os}} + name: et-client-${{matrix.os}}-gcc${{matrix.gcc}} path: ${{ env.CMAKE_BUILD_DIR }}/et${{matrix.extension}} diff --git a/CMakeLists.txt b/CMakeLists.txt index 402e3e804..6d4e89bc3 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -94,8 +94,10 @@ include(CMakeFindDependencyMacro) # Add cmake script directory. list(APPEND CMAKE_MODULE_PATH "${CMAKE_SOURCE_DIR}/cmake") -set(CMAKE_MODULE_PATH "${EXTERNAL_DIR}/sanitizers-cmake/cmake" - ${CMAKE_MODULE_PATH}) +set(CMAKE_MODULE_PATH "${EXTERNAL_DIR}/sanitizers-cmake/cmake" ${CMAKE_MODULE_PATH}) + +# Do not install httplib files +set(HTTPLIB_INSTALL OFF) # Required packages find_package(OpenSSL REQUIRED) @@ -112,7 +114,9 @@ add_subdirectory(${EXTERNAL_DIR}/cxxopts) add_subdirectory(${EXTERNAL_DIR}/cpp-httplib) add_subdirectory(${EXTERNAL_DIR}/json) include_directories( - ${EXTERNAL_DIR}/cpp-httplib + SYSTEM ${EXTERNAL_DIR}/cpp-httplib +) +include_directories( ${EXTERNAL_DIR}/simpleini ${EXTERNAL_DIR}/json/include ${EXTERNAL_DIR}/cxxopts/include @@ -563,9 +567,10 @@ else(WIN32) test/Main.cpp ) - add_dependencies(et-test TerminalCommon et-lib) + add_dependencies(et-test generated-code TerminalCommon et-lib) target_link_libraries( et-test + LINK_PUBLIC TerminalCommon et-lib Catch2::Catch2WithMain @@ -577,7 +582,7 @@ else(WIN32) ${Boost_LIBRARIES} ${CORE_LIBRARIES}) add_test(et-test et-test) - decorate_target(et-test) + add_sanitizers(et-test) endif() if(BUILD_TESTING AND FUZZING) diff --git a/README.md b/README.md index 569db2468..2acaa7107 100644 --- a/README.md +++ b/README.md @@ -111,14 +111,14 @@ Install dependencies: * Fedora (tested on 25): ``` - sudo dnf install boost-devel libsodium-devel ncurses-devel protobuf-devel \ + sudo dnf install 
boost-devel libsodium-devel protobuf-devel \ protobuf-compiler cmake gflags-devel libcurl-devel ``` * Gentoo: ``` - sudo emerge dev-libs/boost dev-libs/libsodium sys-libs/ncurses \ + sudo emerge dev-libs/boost dev-libs/libsodium \ dev-libs/protobuf dev-util/cmake dev-cpp/gflags ``` @@ -206,7 +206,7 @@ git clone --recurse-submodules https://github.com/MisterTea/EternalTerminal.git cd EternalTerminal mkdir build cd build -# Make it work on Apple Silicon: +# Add if it doesn't work on Apple Silicon but should work without it if [[ $(uname -a | grep arm) ]]; then export VCPKG_FORCE_SYSTEM_BINARIES=1; fi cmake ../ make && sudo make install @@ -227,8 +227,8 @@ Grab the deps and then follow this process. Debian/Ubuntu Dependencies: ``` -sudo apt install libboost-dev libsodium-dev libncurses5-dev \ - libprotobuf-dev protobuf-compiler libgflags-dev libutempter-dev libcurl-dev \ +sudo apt install libboost-dev libsodium-dev \ + libprotobuf-dev protobuf-compiler libgflags-dev libutempter-dev libcurl4-openssl-dev \ build-essential ninja-build cmake git zip ``` @@ -255,7 +255,7 @@ Once built, the binary only requires `libgflags-dev` and `libprotobuf-dev`. Install dependencies: ``` sudo yum install epel-release -sudo yum install cmake3 boost-devel libsodium-devel ncurses-devel protobuf-devel \ +sudo yum install cmake3 boost-devel libsodium-devel protobuf-devel \ protobuf-compiler gflags-devel protobuf-lite-devel libcurl-devel \ perl-IPC-Cmd perl-Data-Dumper libunwind-devel libutempter-devel ``` diff --git a/cmake/Findsodium.cmake b/cmake/Findsodium.cmake index dd6ed7679..0528287a1 100644 --- a/cmake/Findsodium.cmake +++ b/cmake/Findsodium.cmake @@ -12,7 +12,7 @@ ######################################################################## # Tries to find the local libsodium installation. 
# -# On Windows the sodium_DIR enviroment variable is used as a default +# On Windows the sodium_DIR environment variable is used as a default # hint which can be overridden by setting the corresponding cmake variable. # # Once done the following variables will be defined: diff --git a/docker/Dockerfile.client b/docker/Dockerfile.client index 949d64f5f..63cea76bf 100644 --- a/docker/Dockerfile.client +++ b/docker/Dockerfile.client @@ -1,7 +1,7 @@ -FROM centos:7 as base +FROM centos:8 as base ENV BUILD_REPOS="epel-release centos-release-scl" \ - BUILD_DEPS="cmake3 boost-devel libsodium-devel ncurses-devel protobuf-devel \ + BUILD_DEPS="cmake3 boost-devel libsodium-devel protobuf-devel \ protobuf-compiler gflags-devel protobuf-lite-devel git \ perl-IPC-Cmd perl-Data-Dumper libunwind-devel libutempter-devel \ devtoolset-11 devtoolset-11-libatomic-devel rh-git227" @@ -18,7 +18,7 @@ RUN yum install -y $BUILD_REPOS && \ bash -c "scl enable devtoolset-11 rh-git227 'cmake3 ../'" && \ bash -c "scl enable devtoolset-11 'make -j $(grep ^processor /proc/cpuinfo |wc -l) && make install'" -FROM centos:7 +FROM centos:8 RUN yum install -y epel-release && \ yum install -y protobuf-lite libsodium libatomic libunwind diff --git a/docker/Dockerfile.server b/docker/Dockerfile.server index bb432d660..62a41edc3 100644 --- a/docker/Dockerfile.server +++ b/docker/Dockerfile.server @@ -1,7 +1,7 @@ -FROM centos:7 as base +FROM centos:8 as base ENV BUILD_REPOS="epel-release centos-release-scl" \ - BUILD_DEPS="cmake3 boost-devel libsodium-devel ncurses-devel protobuf-devel \ + BUILD_DEPS="cmake3 boost-devel libsodium-devel protobuf-devel \ protobuf-compiler gflags-devel protobuf-lite-devel git \ perl-IPC-Cmd perl-Data-Dumper libunwind-devel libutempter-devel \ devtoolset-11 devtoolset-11-libatomic-devel rh-git227" @@ -18,7 +18,7 @@ RUN yum install -y $BUILD_REPOS && \ bash -c "scl enable devtoolset-11 rh-git227 'cmake3 ../'" && \ bash -c "scl enable devtoolset-11 'make -j $(grep ^processor 
/proc/cpuinfo |wc -l) && make install'" -FROM centos:7 +FROM centos:8 RUN yum install -y epel-release && \ yum install -y protobuf-lite libsodium openssh-server libatomic libunwind @@ -31,4 +31,4 @@ EXPOSE 2022 2222 ENTRYPOINT ["/bin/container-entrypoint", "server"] -CMD ["--cfgfile=/etc/et.cfg"] \ No newline at end of file +CMD ["--cfgfile=/etc/et.cfg"] diff --git a/external/Catch2 b/external/Catch2 index 3f0283de7..05e10dfcc 160000 --- a/external/Catch2 +++ b/external/Catch2 @@ -1 +1 @@ -Subproject commit 3f0283de7a9c43200033da996ff9093be3ac84dc +Subproject commit 05e10dfccc28c7f973727c54f850237d07d5e10f diff --git a/external/PlatformFolders b/external/PlatformFolders index d096651df..784f8ceb8 160000 --- a/external/PlatformFolders +++ b/external/PlatformFolders @@ -1 +1 @@ -Subproject commit d096651dfefcec30e91eba48cd52da32059caa8e +Subproject commit 784f8ceb8bbd042722caf2cdec427c7b80e0a960 diff --git a/external/cpp-httplib b/external/cpp-httplib index 9452c0a4b..5c00bbf36 160000 --- a/external/cpp-httplib +++ b/external/cpp-httplib @@ -1 +1 @@ -Subproject commit 9452c0a4b69c5e4e31169ed32e961d330695122c +Subproject commit 5c00bbf36ba8ff47b4fb97712fc38cb2884e5b98 diff --git a/external/json b/external/json index 176d8e261..9cca280a4 160000 --- a/external/json +++ b/external/json @@ -1 +1 @@ -Subproject commit 176d8e261a00dc27bfbf334e12d50572bf43fc4c +Subproject commit 9cca280a4d0ccf0c08f47a99aa71d1b0e52f8d03 diff --git a/external/sentry-native b/external/sentry-native index 22b9b9372..4ec95c072 160000 --- a/external/sentry-native +++ b/external/sentry-native @@ -1 +1 @@ -Subproject commit 22b9b937272952be0e344c0d6c442878982e8caa +Subproject commit 4ec95c0725df5f34440db8fa8d37b4c519fce74e diff --git a/external/simpleini b/external/simpleini index 7bca74f65..9b3ed7ec8 160000 --- a/external/simpleini +++ b/external/simpleini @@ -1 +1 @@ -Subproject commit 7bca74f6535a37846162383e52071f380c99a43a +Subproject commit 9b3ed7ec815997bc8c5b9edf140d6bde653e1458 diff --git 
a/external/vcpkg b/external/vcpkg index ae59b93af..3f806e04f 160000 --- a/external/vcpkg +++ b/external/vcpkg @@ -1 +1 @@ -Subproject commit ae59b93af0c1dce4126e963fca5b330662495fa1 +Subproject commit 3f806e04fdcf30df33c7b445590c9ac13afef946 diff --git a/renovate.json b/renovate.json new file mode 100644 index 000000000..3b4b6a98b --- /dev/null +++ b/renovate.json @@ -0,0 +1,40 @@ +{ + "$schema": "https://docs.renovatebot.com/renovate-schema.json", + "extends": [ + "config:base" + ], + "includeForks": true, + "lockFileMaintenance": { + "enabled": true, + "automerge": true, + "automergeType": "pr", + "platformAutomerge": true + }, + "platformAutomerge": true, + "packageRules": [ + { + "description": "Automerge non-major updates", + "matchUpdateTypes": [ + "minor", + "patch" + ], + "enabled": true, + "automerge": true, + "automergeType": "pr", + "platformAutomerge": true + }, + { + "matchDepTypes": [ + "devDependencies" + ], + "matchPackagePatterns": [ + "lint", + "prettier" + ], + "enabled": true, + "automerge": true, + "automergeType": "pr", + "platformAutomerge": true + } + ] +} \ No newline at end of file diff --git a/src/base/BackedReader.cpp b/src/base/BackedReader.cpp index 295a29248..f93b7c432 100644 --- a/src/base/BackedReader.cpp +++ b/src/base/BackedReader.cpp @@ -47,7 +47,7 @@ int BackedReader::read(Packet* packet) { socketHandler->read(socketFd, tmpBuf, 4 - partialMessage.length()); if (bytesRead == 0) { // Connection is closed. Instead of closing the socket, set EPIPE. - // In EternalTCP, the server needs to explictly tell the client that + // In EternalTCP, the server needs to explicitly tell the client that // the session is over. SetErrno(EPIPE); return -1; @@ -73,7 +73,7 @@ int BackedReader::read(Packet* packet) { ssize_t bytesRead = socketHandler->read(socketFd, &s[0], s.length()); if (bytesRead == 0) { // Connection is closed. Instead of closing the socket, set EPIPE. 
- // In EternalTCP, the server needs to explictly tell the client that + // In EternalTCP, the server needs to explicitly tell the client that // the session is over. SetErrno(EPIPE); return -1; diff --git a/src/base/SocketHandler.cpp b/src/base/SocketHandler.cpp index 8658ddd6d..3e6473aa0 100644 --- a/src/base/SocketHandler.cpp +++ b/src/base/SocketHandler.cpp @@ -20,7 +20,7 @@ void SocketHandler::readAll(int fd, void* buf, size_t count, bool timeout) { ssize_t bytesRead = read(fd, ((char*)buf) + pos, count - pos); if (bytesRead == 0) { // Connection is closed. Instead of closing the socket, set EPIPE. - // In EternalTCP, the server needs to explictly tell the client that + // In EternalTCP, the server needs to explicitly tell the client that // the session is over. SetErrno(EPIPE); bytesRead = -1; diff --git a/src/base/TunnelUtils.cpp b/src/base/TunnelUtils.cpp index 6683c7151..a95384acf 100644 --- a/src/base/TunnelUtils.cpp +++ b/src/base/TunnelUtils.cpp @@ -37,6 +37,7 @@ vector parseRangesToRequests(const string& input) { int portRangeLength = sourcePortEnd - sourcePortStart + 1; for (int i = 0; i < portRangeLength; ++i) { PortForwardSourceRequest pfsr; + pfsr.mutable_source()->set_name("localhost"); pfsr.mutable_source()->set_port(sourcePortStart + i); pfsr.mutable_destination()->set_port(destinationPortStart + i); pfsrs.push_back(pfsr); @@ -49,6 +50,7 @@ vector parseRangesToRequests(const string& input) { "destination must be a range (and vice versa)"); } else { PortForwardSourceRequest pfsr; + pfsr.mutable_source()->set_name("localhost"); pfsr.mutable_source()->set_port(stoi(sourceDestination[0])); pfsr.mutable_destination()->set_port(stoi(sourceDestination[1])); pfsrs.push_back(pfsr); diff --git a/src/htm/MultiplexerState.cpp b/src/htm/MultiplexerState.cpp index c98ba3fb6..16fb6858d 100644 --- a/src/htm/MultiplexerState.cpp +++ b/src/htm/MultiplexerState.cpp @@ -86,7 +86,7 @@ string MultiplexerState::toJsonString() { void MultiplexerState::appendData(const 
string &uid, const string &data) { if (panes.find(uid) == panes.end()) { - STFATAL << "Tried to write to non-existant terminal"; + STFATAL << "Tried to write to non-existent terminal"; } panes[uid]->terminal->appendData(data); } diff --git a/src/terminal/ParseConfigFile.hpp b/src/terminal/ParseConfigFile.hpp index 208725a92..5cb0b5f06 100644 --- a/src/terminal/ParseConfigFile.hpp +++ b/src/terminal/ParseConfigFile.hpp @@ -60,6 +60,11 @@ typedef int socket_t; #endif #endif +#ifdef _MSC_VER +#define strncasecmp _strnicmp +#define strcasecmp _stricmp +#endif + enum ssh_config_opcode_e { SOC_UNSUPPORTED = -1, SOC_HOST, @@ -1426,6 +1431,7 @@ int parse_ssh_config_file(const char *targethost, struct Options *options, } ifstream infile(expandedFilename); + free(expandedFilename); if (!infile.good()) { LOG(INFO) << filename << " not found"; return 0; diff --git a/src/terminal/ServerFifoPath.cpp b/src/terminal/ServerFifoPath.cpp index f487e2350..36db43150 100644 --- a/src/terminal/ServerFifoPath.cpp +++ b/src/terminal/ServerFifoPath.cpp @@ -82,7 +82,7 @@ void TryCreateDirectory(string dir, mode_t mode) { CHECK_EQ(::umask(oldMode), 0) << "Unexpected result when restoring umask, which should return the " - "previous overriden value (0)."; + "previous overridden value (0)."; } } // namespace @@ -106,7 +106,7 @@ void ServerFifoPath::createDirectoriesIfRequired() { // // Create subdirectories for ~/.local/share. These may already be created // with different permissions on different machines, so also create an - // etserver subdir to enforce 0700 permssions. + // etserver subdir to enforce 0700 permissions. 
const string homeDir = GetHome(); TryCreateDirectory(homeDir + "/.local", 0755); TryCreateDirectory(homeDir + "/.local/share", 0755); diff --git a/src/terminal/SshSetupHandler.cpp b/src/terminal/SshSetupHandler.cpp index 956138796..1b82928b2 100644 --- a/src/terminal/SshSetupHandler.cpp +++ b/src/terminal/SshSetupHandler.cpp @@ -27,7 +27,8 @@ string genCommand(const string& passkey, const string& id, string SshSetupHandler::SetupSsh(const string& user, const string& host, const string& host_alias, int port, - const string& jumphost, int jport, bool kill, + const string& jumphost, + const string& jServerFifo, bool kill, int vlevel, const string& cmd_prefix, const string& serverFifo, const std::vector& ssh_options) { @@ -60,7 +61,7 @@ string SshSetupHandler::SetupSsh(const string& user, const string& host, if (!jumphost.empty()) { ssh_args = { "-J", - SSH_USER_PREFIX + jumphost, + jumphost, }; } @@ -72,6 +73,9 @@ string SshSetupHandler::SetupSsh(const string& user, const string& host, ssh_args.push_back(SSH_SCRIPT_DST); + std::string ssh_concat; + for (const auto& piece : ssh_args) ssh_concat += piece + " "; + VLOG(1) << "Trying ssh with args: " << ssh_concat << endl; auto sshBuffer = SubprocessToStringInteractive("ssh", ssh_args); try { @@ -107,9 +111,12 @@ string SshSetupHandler::SetupSsh(const string& user, const string& host, if (!jumphost.empty()) { /* If jumphost is set, we need to pass dst host and port to jumphost * and connect to jumphost here */ - string cmdoptions{"--verbose=" + std::to_string(vlevel)}; - string jump_cmdoptions = cmdoptions + " --jump --dsthost=" + host + - " --dstport=" + to_string(port); + string jump_cmdoptions{"--verbose=" + std::to_string(vlevel)}; + if (!jServerFifo.empty()) { + jump_cmdoptions += " --serverfifo=" + jServerFifo; + } + jump_cmdoptions = jump_cmdoptions + " --jump --dsthost=" + host + + " --dstport=" + to_string(port); string SSH_SCRIPT_JUMP = genCommand(passkey, id, clientTerm, user, kill, cmd_prefix, 
jump_cmdoptions); diff --git a/src/terminal/SshSetupHandler.hpp b/src/terminal/SshSetupHandler.hpp index 5bc0ddb8d..279bee9b6 100644 --- a/src/terminal/SshSetupHandler.hpp +++ b/src/terminal/SshSetupHandler.hpp @@ -8,8 +8,8 @@ class SshSetupHandler { public: static string SetupSsh(const string &user, const string &host, const string &host_alias, int port, - const string &jumphost, int jport, bool kill, - int vlevel, const string &etterminal_path, + const string &jumphost, const string &jServerFifo, + bool kill, int vlevel, const string &etterminal_path, const string &serverFifo, const std::vector &ssh_options); static const string ETTERMINAL_BIN; diff --git a/src/terminal/TelemetryService.cpp b/src/terminal/TelemetryService.cpp index c354b30e7..75a6df790 100644 --- a/src/terminal/TelemetryService.cpp +++ b/src/terminal/TelemetryService.cpp @@ -209,11 +209,12 @@ TelemetryService::TelemetryService(const bool _allow, logSendingThread.reset(new thread([this]() { auto nextDumpTime = std::chrono::system_clock::now(); while (true) { - bool lastRun = shuttingDown; + bool lastRun; string payload; int logBufferSize; { lock_guard guard(logMutex); + lastRun = shuttingDown; logBufferSize = (int)logBuffer.size(); } if (logBufferSize) { @@ -253,6 +254,7 @@ TelemetryService::TelemetryService(const bool _allow, } TelemetryService::~TelemetryService() { + lock_guard guard(logMutex); if (!shuttingDown) { cerr << "Destroyed telemetryService without a shutdown"; } @@ -269,10 +271,10 @@ void TelemetryService::logToSentry(el::Level level, const string& message) { void TelemetryService::logToDatadog(const string& logText, el::Level logLevel, const string& filename, const int line) { map messageJson = { - {"message", logText}, {"level", logLevelToString(logLevel)}, - {"Enviroment", environment}, {"Application", "Eternal Terminal"}, - {"Version", ET_VERSION}, {"TelemetryId", telemetryId.str()}, - {"File", filename}, {"Line", to_string(line)}}; + {"message", logText}, {"level", 
logLevelToString(logLevel)}, + {"Environment", environment}, {"Application", "Eternal Terminal"}, + {"Version", ET_VERSION}, {"TelemetryId", telemetryId.str()}, + {"File", filename}, {"Line", to_string(line)}}; lock_guard lock(logMutex); if (logBuffer.size() > 16 * 1024) { @@ -283,10 +285,13 @@ void TelemetryService::logToDatadog(const string& logText, el::Level logLevel, } void TelemetryService::shutdown() { - if (shuttingDown) { - return; + { + lock_guard guard(logMutex); + if (shuttingDown) { + return; + } + shuttingDown = true; } - shuttingDown = true; #ifdef USE_SENTRY sentry_shutdown(); #endif diff --git a/src/terminal/TerminalClientMain.cpp b/src/terminal/TerminalClientMain.cpp index dc7c0677d..0114fef7d 100644 --- a/src/terminal/TerminalClientMain.cpp +++ b/src/terminal/TerminalClientMain.cpp @@ -42,6 +42,25 @@ int main(int argc, char** argv) { // Override easylogging handler for sigint ::signal(SIGINT, et::InterruptSignalHandler); + Options sshConfigOptions = { + NULL, // username + NULL, // host + NULL, // sshdir + NULL, // knownhosts + NULL, // ProxyCommand + NULL, // ProxyJump + 0, // timeout + 0, // port + 0, // StrictHostKeyChecking + 0, // ssh2 + 0, // ssh1 + NULL, // gss_server_identity + NULL, // gss_client_identity + 0, // gss_delegate_creds + 0, // forward_agent + NULL // identity_agent + }; + // Parse command line arguments cxxopts::Options options("et", "Remote shell for the busy and impatient"); try { @@ -51,7 +70,7 @@ int main(int argc, char** argv) { "[OPTION...] [user@]host[:port]\n\n" " Note that 'host' can be a hostname or ipv4 address with or without " "a port\n or an ipv6 address. 
If the ipv6 address is abbreviated with " - ":: then it must\n be specfied without a port (use -p,--port)."); + ":: then it must\n be specified without a port (use -p,--port)."); options.add_options() // ("h,help", "Print help") // @@ -82,6 +101,9 @@ int main(int argc, char** argv) { cxxopts::value()) // ("jport", "Jumphost machine port", cxxopts::value()->default_value("2022")) // + ("jserverfifo", + "If set, communicate to jumphost on the matching fifo name", + cxxopts::value()->default_value("")) // ("x,kill-other-sessions", "kill all old sessions belonging to the user") // ("macserver", @@ -213,44 +235,32 @@ int main(int argc, char** argv) { exit(0); } - Options sshConfigOptions = { - NULL, // username - NULL, // host - NULL, // sshdir - NULL, // knownhosts - NULL, // ProxyCommand - NULL, // ProxyJump - 0, // timeout - 0, // port - 0, // StrictHostKeyChecking - 0, // ssh2 - 0, // ssh1 - NULL, // gss_server_identity - NULL, // gss_client_identity - 0, // gss_delegate_creds - 0, // forward_agent - NULL // identity_agent - }; - - char* home_dir = ssh_get_user_home_dir(); - const char* host_from_command = destinationHost.c_str(); - ssh_options_set(&sshConfigOptions, SSH_OPTIONS_HOST, - destinationHost.c_str()); - // First parse user-specific ssh config, then system-wide config. - parse_ssh_config_file(host_from_command, &sshConfigOptions, - string(home_dir) + USER_SSH_CONFIG_PATH); - parse_ssh_config_file(host_from_command, &sshConfigOptions, - SYSTEM_SSH_CONFIG_PATH); - LOG(INFO) << "Parsed ssh config file, connecting to " - << sshConfigOptions.host; - destinationHost = string(sshConfigOptions.host); + { + char* home_dir = ssh_get_user_home_dir(); + const char* host_from_command = destinationHost.c_str(); + ssh_options_set(&sshConfigOptions, SSH_OPTIONS_HOST, + destinationHost.c_str()); + // First parse user-specific ssh config, then system-wide config. 
+ parse_ssh_config_file(host_from_command, &sshConfigOptions, + string(home_dir) + USER_SSH_CONFIG_PATH); + parse_ssh_config_file(host_from_command, &sshConfigOptions, + SYSTEM_SSH_CONFIG_PATH); + if (sshConfigOptions.host) { + LOG(INFO) << "Parsed ssh config file, connecting to " + << sshConfigOptions.host; + destinationHost = string(sshConfigOptions.host); + } + free(home_dir); + } // Parse username: cmdline > sshconfig > localuser if (username.empty()) { if (sshConfigOptions.username) { username = string(sshConfigOptions.username); } else { - username = string(ssh_get_local_username()); + char* usernamePtr = ssh_get_local_username(); + username = string(usernamePtr); + SAFE_FREE(usernamePtr); } } @@ -259,11 +269,7 @@ int main(int argc, char** argv) { string proxyjump = string(sshConfigOptions.ProxyJump); size_t colonIndex = proxyjump.find(":"); if (colonIndex != string::npos) { - string userhostpair = proxyjump.substr(0, colonIndex); - size_t atIndex = userhostpair.find("@"); - if (atIndex != string::npos) { - jumphost = userhostpair.substr(atIndex + 1); - } + jumphost = proxyjump.substr(0, colonIndex); } else { jumphost = proxyjump; } @@ -275,7 +281,13 @@ int main(int argc, char** argv) { if (!jumphost.empty()) { is_jumphost = true; LOG(INFO) << "Setting port to jumphost port"; - socketEndpoint.set_name(jumphost); + size_t atIndex = jumphost.find("@"); + if (atIndex != string::npos) { + socketEndpoint.set_name(jumphost.substr(atIndex + 1)); + } else { + socketEndpoint.set_name(jumphost); + jumphost = username + "@" + jumphost; + } socketEndpoint.set_port(result["jport"].as()); } else { socketEndpoint.set_name(destinationHost); @@ -291,7 +303,11 @@ int main(int argc, char** argv) { exit(1); } - int jport = result["jport"].as(); + string jServerFifo = ""; + if (result["jserverfifo"].as() != "") { + jServerFifo = result["jserverfifo"].as(); + } + string serverFifo = ""; if (result["serverfifo"].as() != "") { serverFifo = result["serverfifo"].as(); @@ -304,13 
+320,13 @@ int main(int argc, char** argv) { if (result.count("macserver") > 0) { etterminal_path = "/usr/local/bin/etterminal"; } - if (result.count("etterminal_path")) { + if (result.count("terminal-path")) { etterminal_path = result["terminal-path"].as(); } string idpasskeypair = SshSetupHandler::SetupSsh( - username, destinationHost, host_alias, destinationPort, jumphost, jport, - result.count("x") > 0, result["verbose"].as(), etterminal_path, - serverFifo, ssh_options); + username, destinationHost, host_alias, destinationPort, jumphost, + jServerFifo, result.count("x") > 0, result["verbose"].as(), + etterminal_path, serverFifo, ssh_options); string id = "", passkey = ""; // Trim whitespace @@ -362,6 +378,17 @@ int main(int argc, char** argv) { handleParseException(oe, options); } + // Clean up ssh config options + SAFE_FREE(sshConfigOptions.username); + SAFE_FREE(sshConfigOptions.host); + SAFE_FREE(sshConfigOptions.sshdir); + SAFE_FREE(sshConfigOptions.knownhosts); + SAFE_FREE(sshConfigOptions.ProxyCommand); + SAFE_FREE(sshConfigOptions.ProxyJump); + SAFE_FREE(sshConfigOptions.gss_server_identity); + SAFE_FREE(sshConfigOptions.gss_client_identity); + SAFE_FREE(sshConfigOptions.identity_agent); + #ifdef WIN32 WSACleanup(); #endif diff --git a/src/terminal/TerminalServer.cpp b/src/terminal/TerminalServer.cpp index c980019d7..6366cdd8c 100644 --- a/src/terminal/TerminalServer.cpp +++ b/src/terminal/TerminalServer.cpp @@ -58,7 +58,7 @@ void TerminalServer::run() { } tv.tv_sec = 0; - tv.tv_usec = 10000; + tv.tv_usec = 100000; const int numFdsSet = select(maxFd + 1, &rfds, NULL, NULL, &tv); if (numFdsSet < 0 && errno == EINTR) { @@ -144,7 +144,7 @@ void TerminalServer::runJumpHost( maxfd = max(maxfd, serverClientFd); } tv.tv_sec = 0; - tv.tv_usec = 10000; + tv.tv_usec = 100000; select(maxfd + 1, &rfd, NULL, NULL, &tv); try { @@ -282,7 +282,7 @@ void TerminalServer::runTerminal( maxfd = max(maxfd, serverClientFd); } tv.tv_sec = 0; - tv.tv_usec = 10000; + tv.tv_usec = 
100000; select(maxfd + 1, &rfd, NULL, NULL, &tv); try { diff --git a/src/terminal/forwarding/PortForwardHandler.cpp b/src/terminal/forwarding/PortForwardHandler.cpp index 45e1a0bc8..df70c01c1 100644 --- a/src/terminal/forwarding/PortForwardHandler.cpp +++ b/src/terminal/forwarding/PortForwardHandler.cpp @@ -45,10 +45,6 @@ PortForwardSourceResponse PortForwardHandler::createSource( SocketEndpoint source; if (pfsr.has_source()) { source = pfsr.source(); - if (source.has_name()) { - throw runtime_error( - "Named socket tunneling is only allowed with temporary filenames."); - } } else { // Make a random file to forward the pipe string sourcePattern = diff --git a/test/ConnectionTest.cpp b/test/ConnectionTest.cpp index 960920598..8823d4a6a 100644 --- a/test/ConnectionTest.cpp +++ b/test/ConnectionTest.cpp @@ -140,8 +140,8 @@ class TestServerConnection : public ServerConnection { public: TestServerConnection(shared_ptr _socketHandler, SocketEndpoint socketEndpoint) - : ServerConnection(_socketHandler, socketEndpoint){}; - virtual ~TestServerConnection(){}; + : ServerConnection(_socketHandler, socketEndpoint) {} + virtual ~TestServerConnection() {} virtual bool newClient( shared_ptr _serverClientState) { string clientId = _serverClientState->getId(); diff --git a/test/FakeConsole.hpp b/test/FakeConsole.hpp index 6a2323974..f85679c48 100644 --- a/test/FakeConsole.hpp +++ b/test/FakeConsole.hpp @@ -158,7 +158,7 @@ class FakeUserTerminal : public UserTerminal { return getFd(); }; - virtual void runTerminal(){ + virtual void runTerminal() { }; diff --git a/test/system_tests/connect_with_jumphost.sh b/test/system_tests/connect_with_jumphost.sh new file mode 100755 index 000000000..f448df96e --- /dev/null +++ b/test/system_tests/connect_with_jumphost.sh @@ -0,0 +1,27 @@ +#!/bin/bash +set -x +set -e + +ssh -o 'PreferredAuthentications=publickey' localhost "echo" || exit 1 # Fails if we can't ssh into localhost without a password + +# Bypass host check +ssh -o 
"StrictHostKeyChecking no" localhost echo "Bypassing host check 1" +ssh -o "StrictHostKeyChecking no" 127.0.0.1 echo "Bypassing host check 2" + +mkdir -p /tmp/et_test_logs/connect_with_jumphost/1 +build/etserver --port 9900 --serverfifo=/tmp/etserver.idpasskey.fifo1 -l /tmp/et_test_logs/connect_with_jumphost/1 & +first_server_pid=$! + +mkdir -p /tmp/et_test_logs/connect_with_jumphost/2 +build/etserver --port 9901 --serverfifo=/tmp/etserver.idpasskey.fifo2 -l /tmp/et_test_logs/connect_with_jumphost/2 & +second_server_pid=$! +sleep 3 + +# Make sure servers are working +build/et -c "echo 'Hello World 1!'" --serverfifo=/tmp/etserver.idpasskey.fifo1 --logtostdout --terminal-path $PWD/build/etterminal localhost:9900 +build/et -c "echo 'Hello World 2!'" --serverfifo=/tmp/etserver.idpasskey.fifo2 --logtostdout --terminal-path $PWD/build/etterminal localhost:9901 + +build/et -c "echo 'Hello World 3!'" --serverfifo=/tmp/etserver.idpasskey.fifo2 --logtostdout --terminal-path $PWD/build/etterminal --jumphost localhost --jport 9900 --jserverfifo=/tmp/etserver.idpasskey.fifo1 127.0.0.1:9901 # We can't use 'localhost' for both the jumphost and the destination because ssh doesn't support keeping them the same. + +kill -9 $first_server_pid +kill -9 $second_server_pid