Replace wget with curl #1184
# This file is auto-generated; make changes in src/* then run build.sh to regenerate this file
name: 'build-test' | |
on: [pull_request, push] | |
# secrets: | |
# * GOOGLE_CREDENTIALS - base64 encoded google service account credentials, must be a single line with no whitespace | |
# * RESULTS_SHEET_ID - google sheets id of sheet to upload results to | |
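# For reference, a single-line value in the expected format could be produced locally with something like
# (an illustrative command, not part of this workflow; assumes a downloaded service account JSON file):
#   openssl base64 -A -in credentials.json
# which is the inverse of the "openssl base64 -d -A" decode used in the "setup google credentials" steps below.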
env: | |
SECRET_GOOGLE_CREDENTIALS: ${{ secrets.GOOGLE_CREDENTIALS }} | |
SECRET_RESULTS_SHEET_ID: ${{ secrets.RESULTS_SHEET_ID }} | |
jobs: | |
build_and_test: | |
name: '${{ matrix.os }}: build and test (install mdns: ${{ matrix.install_mdns }}, use conan: ${{ matrix.use_conan }}, force cpprest asio: ${{ matrix.force_cpprest_asio }}, dns-sd mode: ${{ matrix.dns_sd_mode}}, enable_authorization: ${{ matrix.enable_authorization }})' | |
runs-on: ${{ matrix.os }} | |
strategy: | |
fail-fast: false | |
matrix: | |
os: [ubuntu-20.04, macos-12, windows-2019] | |
install_mdns: [false, true] | |
use_conan: [true] | |
force_cpprest_asio: [false] | |
dns_sd_mode: [multicast, unicast] | |
enable_authorization: [false, true] | |
exclude: | |
# install_mdns is only meaningful on Linux | |
- os: macos-12 | |
enable_authorization: false | |
- os: windows-2019 | |
enable_authorization: false | |
- os: ubuntu-20.04 | |
enable_authorization: false | |
- os: macos-12 | |
install_mdns: true | |
- os: windows-2019 | |
install_mdns: true | |
# for now, unicast DNS-SD tests are only implemented on Linux | |
- os: macos-12 | |
dns_sd_mode: unicast | |
- os: windows-2019 | |
dns_sd_mode: unicast | |
# for now, exclude unicast DNS-SD with mDNSResponder due to | |
# intermittent *** buffer overflow detected *** in mdnsd | |
- os: ubuntu-20.04 | |
install_mdns: true | |
dns_sd_mode: unicast | |
enable_authorization: true | |
include: | |
- os: windows-2022 | |
install_mdns: false | |
use_conan: true | |
force_cpprest_asio: true | |
dns_sd_mode: multicast | |
enable_authorization: true | |
- os: windows-2022 | |
install_mdns: false | |
use_conan: true | |
force_cpprest_asio: true | |
dns_sd_mode: multicast | |
enable_authorization: false | |
- os: ubuntu-22.04 | |
install_mdns: false | |
use_conan: true | |
force_cpprest_asio: false | |
dns_sd_mode: multicast | |
enable_authorization: true | |
- os: ubuntu-22.04 | |
install_mdns: false | |
use_conan: true | |
force_cpprest_asio: false | |
dns_sd_mode: multicast | |
enable_authorization: false | |
steps: | |
- uses: actions/checkout@v3 | |
- name: set environment variables | |
shell: bash | |
run: | | |
if [[ "${{ matrix.enable_authorization }}" == "true" ]]; then | |
authorization_mode=auth | |
else | |
authorization_mode=noauth | |
fi | |
if [[ "${{ runner.os }}" == "Linux" ]]; then | |
if [[ "${{ matrix.install_mdns }}" == "true" ]]; then | |
echo "BUILD_NAME=${{ matrix.os }}_mdns_${{ matrix.dns_sd_mode }}_$authorization_mode" >> $GITHUB_ENV | |
else | |
echo "BUILD_NAME=${{ matrix.os }}_avahi_${{ matrix.dns_sd_mode }}_$authorization_mode" >> $GITHUB_ENV | |
fi | |
elif [[ "${{ matrix.force_cpprest_asio }}" == "true" ]]; then | |
echo "BUILD_NAME=${{ matrix.os }}_asio_$authorization_mode" >> $GITHUB_ENV | |
else | |
echo "BUILD_NAME=${{ matrix.os }}_$authorization_mode" >> $GITHUB_ENV | |
fi | |
GITHUB_COMMIT=`echo "${{ github.sha }}" | cut -c1-7` | |
echo "GITHUB_COMMIT=$GITHUB_COMMIT" >> $GITHUB_ENV | |
echo "GITHUB_WORKSPACE=${{ github.workspace }}" >> $GITHUB_ENV | |
echo "RUNNER_WORKSPACE=${{ runner.workspace }}" >> $GITHUB_ENV | |
- name: install python | |
uses: actions/setup-python@v4 | |
with: | |
python-version: 3.8 | |
- name: install pip | |
run: | | |
python -m pip install --upgrade pip | |
- name: setup google credentials | |
if: env.SECRET_GOOGLE_CREDENTIALS | |
shell: bash | |
working-directory: ${{ env.GITHUB_WORKSPACE }} | |
run: | | |
mkdir -p gdrive | |
echo "${{ env.SECRET_GOOGLE_CREDENTIALS }}" | openssl base64 -d -A -out gdrive/credentials.json | |
echo "GDRIVE_CREDENTIALS=`pwd`/gdrive/credentials.json" >> $GITHUB_ENV | |
- name: install conan | |
if: matrix.use_conan == true | |
run: | | |
pip install conan~=2.4.1 | |
- name: install cmake | |
if: matrix.os != 'ubuntu-14.04' | |
uses: lukka/[email protected] | |
- name: setup bash path | |
working-directory: ${{ env.GITHUB_WORKSPACE }} | |
shell: bash | |
run: | | |
# translate GITHUB_WORKSPACE into a bash path from a Windows path
workspace_dir=`pwd` | |
echo "GITHUB_WORKSPACE_BASH=${workspace_dir}" >> $GITHUB_ENV | |
- name: windows setup | |
if: runner.os == 'Windows' | |
run: | | |
# set compiler to cl.exe to avoid building with gcc. | |
echo "CMAKE_COMPILER_ARGS=-DCMAKE_C_COMPILER=cl.exe -DCMAKE_CXX_COMPILER=cl.exe" >> $env:GITHUB_ENV | |
# disable unused network interface | |
netsh interface set interface name="vEthernet (nat)" admin=DISABLED | |
# get host IP address | |
$env:hostip = ( | |
Get-NetIPConfiguration | | |
Where-Object { | |
$_.IPv4DefaultGateway -ne $null -and | |
$_.NetAdapter.Status -ne "Disconnected" | |
} | |
).IPv4Address.IPAddress | |
echo "HOST_IP_ADDRESS=$env:hostip" >> $env:GITHUB_ENV | |
ipconfig | |
# add the CRL Distribution Point to hosts so that it's discoverable when running the AMWA test suite in mDNS mode
# and to avoid the SSL error WINHTTP_CALLBACK_STATUS_FLAG_CERT_REV_FAILED (failed to check revocation status)
Add-Content $env:WINDIR\System32\Drivers\Etc\Hosts "`n$env:hostip crl.testsuite.nmos.tv`n" | |
# add the OCSP server to hosts so that it's discoverable when running the AMWA test suite in mDNS mode | |
Add-Content $env:WINDIR\System32\Drivers\Etc\Hosts "`n$env:hostip ocsp.testsuite.nmos.tv`n" | |
# add nmos-api.local to hosts to work around mDNS lookups on Windows being very slow and causing the AMWA test suite to take 2-3 hours to complete
Add-Content $env:WINDIR\System32\Drivers\Etc\Hosts "`n$env:hostip nmos-api.local`n" | |
# add nmos-mocks.local to hosts to work around mDNS lookups on Windows being very slow and causing the AMWA test suite IS-04-01 test_05 to fail due to latency messing up the apparent heartbeat interval
Add-Content $env:WINDIR\System32\Drivers\Etc\Hosts "`n$env:hostip nmos-mocks.local`n" | |
# Configure SCHANNEL, e.g. to disable TLS 1.0 and TLS 1.1 | |
reg import ${{ env.GITHUB_WORKSPACE }}/Sandbox/configure_schannel.reg | |
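# the contents of Sandbox/configure_schannel.reg aren't shown here; as a rough sketch (illustrative only,
# not imported by this step), disabling a protocol server-side is typically done with registry values like:
#   [HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Control\SecurityProviders\SCHANNEL\Protocols\TLS 1.0\Server]
#   "Enabled"=dword:00000000
#   "DisabledByDefault"=dword:00000001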
- name: windows install bonjour | |
if: runner.os == 'Windows' | |
run: | | |
# download bonjour installer | |
curl -L https://download.info.apple.com/Mac_OS_X/061-8098.20100603.gthyu/BonjourPSSetup.exe -o BonjourPSSetup.exe -q | |
& 7z.exe e BonjourPSSetup.exe Bonjour64.msi -y | |
msiexec /i ${{ env.GITHUB_WORKSPACE }}\Bonjour64.msi /qn /norestart | |
- name: mac setup | |
if: runner.os == 'macOS' | |
run: | | |
hostip=$(ipconfig getifaddr en0) | |
echo "HOST_IP_ADDRESS=$hostip" >> $GITHUB_ENV | |
active_xcode_version=`xcode-select -p` | |
echo "SDKROOT=${active_xcode_version}/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk" >> $GITHUB_ENV | |
ifconfig | |
echo "CTEST_EXTRA_ARGS=$CTEST_EXTRA_ARGS -E testMdnsResolveAPIs" >> $GITHUB_ENV | |
echo "CTEST_EXPECTED_FAILURES=$CTEST_EXPECTED_FAILURES -R testMdnsResolveAPIs" >> $GITHUB_ENV | |
# add the CRL Distribution Point and the OCSP server to hosts so that they're discoverable when running the AMWA test suite in mDNS mode
echo -e "$hostip crl.testsuite.nmos.tv\n$hostip ocsp.testsuite.nmos.tv" | sudo tee -a /etc/hosts > /dev/null | |
# testssl.sh needs "timeout" | |
brew install coreutils | |
- name: mac docker install | |
# installs Docker on a macOS runner; GitHub's documentation states Docker is already available, so this shouldn't be necessary
# can be used to run the AMWA test suite, but the test suite doesn't seem to be able to communicate with nodes running on the host
if: false | |
run: | | |
brew install docker docker-compose docker-machine xhyve docker-machine-driver-xhyve | |
sudo chown root:wheel $(brew --prefix)/opt/docker-machine-driver-xhyve/bin/docker-machine-driver-xhyve | |
sudo chmod u+s $(brew --prefix)/opt/docker-machine-driver-xhyve/bin/docker-machine-driver-xhyve | |
mkdir -p /Users/runner/.docker/machine/cache/ | |
# workaround "docker-machine" failing to download boot2docker.iso | |
curl -Lo ~/.docker/machine/cache/boot2docker.iso https://github.com/boot2docker/boot2docker/releases/download/v19.03.5/boot2docker.iso | |
i=0 | |
while ! docker-machine "--github-api-token=${{ secrets.GITHUB_TOKEN }}" create default --driver xhyve; do | |
docker-machine rm -f default | |
sleep 1 | |
i=$(( i + 1 ))
if [[ $i -gt 5 ]]; then | |
exit 1 | |
fi | |
done | |
eval $(docker-machine env default) | |
echo "DOCKER_MACHINE_NAME=$DOCKER_MACHINE_NAME" >> $GITHUB_ENV | |
echo "DOCKER_TLS_VERIFY=$DOCKER_TLS_VERIFY" >> $GITHUB_ENV | |
echo "DOCKER_HOST=$DOCKER_HOST" >> $GITHUB_ENV | |
echo "DOCKER_CERT_PATH=$DOCKER_CERT_PATH" >> $GITHUB_ENV | |
- name: ubuntu setup | |
if: runner.os == 'Linux' | |
run: | | |
sudo ip addr flush dev docker0 || echo "remove docker ip failed" | |
hostip=$(hostname -I | cut -f1 -d' ') | |
echo "HOST_IP_ADDRESS=$hostip" >> $GITHUB_ENV | |
ip address | |
# add the CRL Distribution Point and the OCSP server to hosts so that they're discoverable when running the AMWA test suite in mDNS mode
echo -e "$hostip crl.testsuite.nmos.tv\n$hostip ocsp.testsuite.nmos.tv" | sudo tee -a /etc/hosts > /dev/null | |
# re-synchronize the package index | |
sudo apt-get update -q | |
- name: ubuntu mdns install | |
if: runner.os == 'Linux' && matrix.install_mdns == true | |
run: | | |
cd ${{ env.GITHUB_WORKSPACE }} | |
mkdir mDNSResponder | |
cd mDNSResponder | |
curl -L https://github.com/apple-oss-distributions/mDNSResponder/archive/mDNSResponder-878.200.35.tar.gz -s | tar -xvzf - --strip-components=1 > /dev/null | |
patch -p1 < ${{ env.GITHUB_WORKSPACE }}/Development/third_party/mDNSResponder/unicast.patch | |
patch -p1 < ${{ env.GITHUB_WORKSPACE }}/Development/third_party/mDNSResponder/permit-over-long-service-types.patch | |
patch -p1 < ${{ env.GITHUB_WORKSPACE }}/Development/third_party/mDNSResponder/poll-rather-than-select.patch | |
cd mDNSPosix | |
make os=linux && sudo make os=linux install | |
# install Name Service Cache Daemon to speed up repeated mDNS name discovery | |
sudo apt-get install -f nscd | |
if [ -f /.dockerenv ]; then | |
# nscd doesn't run automatically under docker | |
mkdir -p /var/run/nscd | |
nscd | |
fi | |
# force dependency on mDNSResponder | |
echo "CMAKE_EXTRA_ARGS=${{ env.CMAKE_EXTRA_ARGS }} -DNMOS_CPP_USE_AVAHI:BOOL=\"0\"" >> $GITHUB_ENV | |
- name: ubuntu non-conan setup | |
if: runner.os == 'Linux' && matrix.use_conan == false | |
run: | | |
sudo apt-get install -y \ | |
libboost-chrono-dev \ | |
libboost-date-time-dev \ | |
libboost-regex-dev \ | |
libboost-system-dev \ | |
libboost-thread-dev \ | |
libboost-random-dev \ | |
libboost-filesystem-dev \ | |
openssl \ | |
libssl-dev | |
cd ${{ env.RUNNER_WORKSPACE }} | |
git clone --recurse-submodules --branch v2.10.19 https://github.com/Microsoft/cpprestsdk | |
cd cpprestsdk/Release | |
mkdir build | |
cd build | |
cmake .. -DCMAKE_BUILD_TYPE:STRING="Release" -DWERROR:BOOL="0" -DBUILD_SAMPLES:BOOL="0" -DBUILD_TESTS:BOOL="0" | |
make -j 2 && sudo make install | |
echo "CMAKE_EXTRA_ARGS=${{ env.CMAKE_EXTRA_ARGS }}" \ | |
"-DWEBSOCKETPP_INCLUDE_DIR:PATH=\"${{ env.RUNNER_WORKSPACE }}/cpprestsdk/Release/libs/websocketpp\"" \ | |
"-DNMOS_CPP_USE_SUPPLIED_JSON_SCHEMA_VALIDATOR:BOOL=\"1\"" \ | |
"-DNMOS_CPP_USE_SUPPLIED_JWT_CPP:BOOL=\"1\"" \ | |
>> $GITHUB_ENV | |
- name: ubuntu avahi setup | |
if: runner.os == 'Linux' && matrix.install_mdns == false | |
run: | | |
sudo apt-get install -f libavahi-compat-libdnssd-dev libnss-mdns avahi-utils | |
echo "CTEST_EXTRA_ARGS=$CTEST_EXTRA_ARGS -E testMdnsAdvertiseAddress" >> $GITHUB_ENV | |
echo "CTEST_EXPECTED_FAILURES=$CTEST_EXPECTED_FAILURES -R testMdnsAdvertiseAddress" >> $GITHUB_ENV | |
# make Avahi respond only on the "eth0" interface
sudo sed -i 's/#*allow-interfaces=.*/allow-interfaces=eth0/g' /etc/avahi/avahi-daemon.conf | |
sudo systemctl restart avahi-daemon | |
# install Name Service Cache Daemon to speed up repeated mDNS name discovery | |
sudo apt-get install -f nscd | |
# force dependency on avahi | |
echo "CMAKE_EXTRA_ARGS=${{ env.CMAKE_EXTRA_ARGS }} -DNMOS_CPP_USE_AVAHI:BOOL=\"1\"" >> $GITHUB_ENV | |
- name: force cpprest asio | |
if: matrix.force_cpprest_asio == true && matrix.use_conan == true | |
shell: bash | |
run: | | |
echo "CONAN_INSTALL_EXTRA_ARGS=--options\;cpprestsdk/*:http_client_impl=asio\;--options\;cpprestsdk/*:http_listener_impl=asio" >> $GITHUB_ENV | |
- name: enable conan | |
if: matrix.use_conan == true | |
shell: bash | |
run: | | |
echo "CMAKE_EXTRA_ARGS=${{ env.CMAKE_EXTRA_ARGS }}" \ | |
"-DCMAKE_PROJECT_TOP_LEVEL_INCLUDES:STRING=\"third_party/cmake/conan_provider.cmake\"" \ | |
"-DCONAN_INSTALL_ARGS:STRING=\"--build=missing\;${{ env.CONAN_INSTALL_EXTRA_ARGS }}\;--lockfile-out=conan.lock\"" \ | |
>> $GITHUB_ENV | |
cat $GITHUB_ENV | |
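# with these settings the CMake configure step below hands dependency installation to
# third_party/cmake/conan_provider.cmake, which effectively runs something like (sketch; exact arguments
# are assembled by the provider from CONAN_INSTALL_ARGS):
#   conan install ... --build=missing <CONAN_INSTALL_EXTRA_ARGS> --lockfile-out=conan.lock
# so no separate "conan install" step is needed before the build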
- name: setup developer command prompt for Microsoft Visual C++ | |
if: runner.os == 'Windows' | |
uses: ilammy/msvc-dev-cmd@v1 | |
- name: build | |
uses: lukka/[email protected] | |
with: | |
cmakeListsOrSettingsJson: CMakeListsTxtAdvanced | |
cmakeListsTxtPath: '${{ env.GITHUB_WORKSPACE }}/Development/CMakeLists.txt' | |
buildDirectory: '${{ env.RUNNER_WORKSPACE }}/build/' | |
cmakeAppendedArgs: '-GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX="${{ env.RUNNER_WORKSPACE }}/install" ${{ env.CMAKE_COMPILER_ARGS }} ${{ env.CMAKE_EXTRA_ARGS }}' | |
- name: dump conan lockfile | |
if: matrix.use_conan == true | |
run: | | |
cat ${{ env.RUNNER_WORKSPACE }}/build/conan.lock | |
- name: unit test | |
run: | | |
cd ${{ env.RUNNER_WORKSPACE }}/build/ | |
ctest --output-on-failure ${{ env.CTEST_EXTRA_ARGS }} | |
- name: expected unit test failures | |
if: env.CTEST_EXPECTED_FAILURES != null | |
continue-on-error: true | |
run: | | |
cd ${{ env.RUNNER_WORKSPACE }}/build/ | |
ctest --output-on-failure ${{ env.CTEST_EXPECTED_FAILURES }} | |
- name: install | |
uses: lukka/[email protected] | |
with: | |
cmakeListsOrSettingsJson: CMakeListsTxtAdvanced | |
cmakeListsTxtPath: '${{ env.GITHUB_WORKSPACE }}/Development/CMakeLists.txt' | |
buildDirectory: '${{ env.RUNNER_WORKSPACE }}/build/' | |
buildWithCMakeArgs: '--target install' | |
- name: set install test environment variable | |
shell: bash | |
run: | | |
# replace backslashes with forward slashes on Windows | |
echo "CMAKE_WORKSPACE=${RUNNER_WORKSPACE//\\/\/}" >> $GITHUB_ENV | |
- name: install test | |
if: runner.os != 'macOS' | |
uses: lukka/[email protected] | |
with: | |
cmakeListsOrSettingsJson: CMakeListsTxtAdvanced | |
cmakeListsTxtPath: '${{ env.GITHUB_WORKSPACE }}/Sandbox/my-nmos-node/CMakeLists.txt' | |
buildDirectory: '${{ env.RUNNER_WORKSPACE }}/build-my-nmos-node/' | |
cmakeAppendedArgs: '-GNinja | |
-DCMAKE_BUILD_TYPE=Release | |
-DCMAKE_FIND_PACKAGE_PREFER_CONFIG="1" | |
-DCMAKE_MODULE_PATH="${{ env.CMAKE_WORKSPACE }}/build/conan" | |
-DCMAKE_PREFIX_PATH="${{ env.CMAKE_WORKSPACE }}/install" | |
-DCMAKE_INSTALL_PREFIX="${{ env.CMAKE_WORKSPACE }}/build/conan" | |
${{ env.CMAKE_COMPILER_ARGS }}' | |
- name: install test log | |
if: runner.os != 'macOS' | |
run: | | |
# dump the log file created in Sandbox/my-nmos-node/CMakeLists.txt | |
cat ${{ env.RUNNER_WORKSPACE }}/build-my-nmos-node/my-nmos-node_include-release.txt | |
- name: install wsl | |
if: runner.os == 'Windows' | |
run: | | |
& curl -L https://aka.ms/wsl-ubuntu-1804 -o ubuntu-1804.appx | |
Rename-Item .\ubuntu-1804.appx .\ubuntu-1804.zip | |
Expand-Archive .\ubuntu-1804.zip .\ubuntu-1804 | |
cd ubuntu-1804 | |
.\ubuntu1804.exe install --root | |
- name: AMWA test suite | |
shell: bash | |
working-directory: ${{ env.RUNNER_WORKSPACE }} | |
run: | |
| | |
set -x | |
root_dir=`pwd` | |
# Install AMWA NMOS Testing Tool | |
git clone https://github.com/AMWA-TV/nmos-testing.git | |
cd nmos-testing | |
# Configure the Testing Tool so all APIs are tested with TLS and authorization | |
printf "from . import Config as CONFIG\nCONFIG.ENABLE_HTTPS = True\nCONFIG.MOCK_SERVICES_WARM_UP_DELAY = 30\nCONFIG.HTTP_TIMEOUT = 2\n" > nmostesting/UserConfig.py | |
# Set the DNS-SD mode | |
printf 'CONFIG.DNS_SD_MODE = "'${{ matrix.dns_sd_mode }}'"\n' >> nmostesting/UserConfig.py | |
# Set the client JWKS_URI so that the mock Authorization Server can obtain the client JSON Web Key Set (public keys) to verify the client_assertion when the client requests an access token
if [[ "${{ matrix.dns_sd_mode }}" == "multicast" ]]; then | |
hostname=nmos-api.local | |
else | |
hostname=api.testsuite.nmos.tv | |
fi | |
printf 'CONFIG.JWKS_URI = "https://'${hostname}':1080/x-authorization/jwks"\n' >> nmostesting/UserConfig.py | |
if [[ "${{matrix.enable_authorization}}" == "true" ]]; then | |
printf 'CONFIG.ENABLE_AUTH = True\n' >> nmostesting/UserConfig.py | |
else | |
printf 'CONFIG.ENABLE_AUTH = False\n' >> nmostesting/UserConfig.py | |
fi | |
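# at this point nmostesting/UserConfig.py looks something like this (multicast, authorization enabled case):
#   from . import Config as CONFIG
#   CONFIG.ENABLE_HTTPS = True
#   CONFIG.MOCK_SERVICES_WARM_UP_DELAY = 30
#   CONFIG.HTTP_TIMEOUT = 2
#   CONFIG.DNS_SD_MODE = "multicast"
#   CONFIG.JWKS_URI = "https://nmos-api.local:1080/x-authorization/jwks"
#   CONFIG.ENABLE_AUTH = True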
# Download testssl | |
cd testssl | |
curl -L https://github.com/drwetter/testssl.sh/archive/v3.0.7.tar.gz -s | tar -xvzf - --strip-components=1 > /dev/null | |
cd .. | |
# Create output directories | |
mkdir results | |
mkdir badges | |
if [[ "${{ env.DOCKER_TEST_SUITE }}" == "true" ]]; then | |
# run test suite in amwa/nmos-testing docker container | |
docker pull amwa/nmos-testing | |
docker run -d --name "nmos_testing" --entrypoint="/usr/bin/tail" -v `pwd`/results:/home/nmos-testing/results amwa/nmos-testing -f /dev/null | |
run_python="docker exec -i nmos_testing python3" | |
elif [[ "${{ env.VAGRANT_TEST_SUITE }}" == "true" ]]; then | |
# run test suite in vagrant VM | |
cp ${{ env.GITHUB_WORKSPACE_BASH }}/.github/workflows/mac_Vagrantfile ./Vagrantfile | |
vagrant plugin install vagrant-scp | |
vagrant up | |
vagrant ssh -- mkdir results | |
run_python="vagrant ssh -- python3" | |
elif [[ "${{ runner.os }}" == "Linux" && "${{ matrix.dns_sd_mode }}" == "unicast" ]]; then | |
# run test suite directly | |
sudo pip install --upgrade -r requirements.txt | |
# install SDPoker | |
npm install -g git+https://[email protected]/AMWA-TV/sdpoker.git | |
run_python="sudo python" | |
else | |
# run test suite directly | |
pip install -r requirements.txt | |
# install SDPoker | |
npm install -g git+https://[email protected]/AMWA-TV/sdpoker.git | |
run_python="python" | |
fi | |
pip install -r utilities/run-test-suites/gsheetsImport/requirements.txt | |
if [[ "${{ runner.os }}" == "Windows" ]]; then | |
# install certificates | |
certutil -enterprise -addstore -user root test_data\\BCP00301\\ca\\certs\\ca.cert.pem | |
certutil -enterprise -addstore -user ca test_data\\BCP00301\\ca\\intermediate\\certs\\intermediate.cert.pem | |
certutil -importpfx -enterprise test_data\\BCP00301\\ca\\intermediate\\certs\\ecdsa.api.testsuite.nmos.tv.cert.chain.pfx | |
certutil -importpfx -enterprise test_data\\BCP00301\\ca\\intermediate\\certs\\rsa.api.testsuite.nmos.tv.cert.chain.pfx | |
# RSA | |
netsh http add sslcert ipport=0.0.0.0:1080 certhash=021d50df2177c07095485184206ee2297e50b65c appid="{00000000-0000-0000-0000-000000000000}" | |
# ECDSA | |
#netsh http add sslcert ipport=0.0.0.0:1080 certhash=875eca592c49120254b32bb8bed90ac3679015a5 appid="{00000000-0000-0000-0000-000000000000}" | |
# RSA | |
netsh http add sslcert ipport=0.0.0.0:8088 certhash=021d50df2177c07095485184206ee2297e50b65c appid="{00000000-0000-0000-0000-000000000000}" | |
# ECDSA | |
#netsh http add sslcert ipport=0.0.0.0:8088 certhash=875eca592c49120254b32bb8bed90ac3679015a5 appid="{00000000-0000-0000-0000-000000000000}" | |
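# the certhash values above are the SHA-1 thumbprints of the RSA and ECDSA test certificates imported
# with certutil; if the test data were regenerated they could be looked up again with, for example,
#   certutil -enterprise -store my
# (illustrative; the hashes are fixed for the certificates shipped in test_data/BCP00301)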
fi | |
if [[ "${{ runner.os }}" == "macOS" ]]; then | |
# force DNS lookups to IPv4 as mDNS lookups on macOS seem to wait for the IPv6 lookup to time out before returning the IPv4 result
mv nmostesting/GenericTest.py nmostesting/GenericTest.py.old | |
printf 'import socket\nold_getaddrinfo = socket.getaddrinfo\ndef new_getaddrinfo(host, port, family=0, type=0, proto=0, flags=0):\n return old_getaddrinfo(host, port, socket.AF_INET, type, proto, flags)\nsocket.getaddrinfo = new_getaddrinfo\n' > nmostesting/GenericTest.py | |
cat nmostesting/GenericTest.py.old >> nmostesting/GenericTest.py | |
fi | |
if [[ "${{ runner.os }}" == "Linux" && "${{ matrix.use_conan }}" == "false" ]]; then | |
# the Ubuntu 14.04 non-Conan build uses Boost 1.54.0, which doesn't support disabling TLS 1.1
mkdir -p ${{ env.GITHUB_WORKSPACE_BASH }}/Sandbox/nmos-testing-options/ | |
echo "--ignore test_01" > ${{ env.GITHUB_WORKSPACE_BASH }}/Sandbox/nmos-testing-options/BCP-003-01.txt | |
echo "1" > ${{ env.GITHUB_WORKSPACE_BASH }}/Sandbox/nmos-testing-options/BCP-003-01_max_disabled.txt | |
fi | |
if [[ "${{ runner.os }}" == "Linux" ]]; then | |
if [[ "${{ matrix.install_mdns }}" == "true" ]]; then | |
echo "Stopping mdnsd" | |
sudo /etc/init.d/mdns stop | |
else | |
echo "Stopping avahi-daemon" | |
sudo systemctl stop avahi-daemon | |
fi | |
fi | |
if [[ "${{ matrix.dns_sd_mode }}" == "multicast" ]]; then | |
domain=local | |
else | |
domain=testsuite.nmos.tv | |
if [[ "${{ runner.os }}" == "Linux" ]]; then | |
# add host names | |
echo -e "${{ env.HOST_IP_ADDRESS }} api.$domain\n${{ env.HOST_IP_ADDRESS }} mocks.$domain" | sudo tee -a /etc/hosts > /dev/null | |
# force testing tool to cache specification repos before changing the resolver configuration | |
$run_python nmos-test.py suite IS-04-01 --selection auto --host ${{ env.HOST_IP_ADDRESS }} --port 444 --version v1.3 || true | |
# change the resolver configuration to use only the testing tool's mock DNS server | |
# and instead configure the mock DNS server to use an upstream DNS server | |
# as unicast DNS-SD test results are inconsistent if other servers are also configured | |
dns_upstream_ip=$(cat /etc/resolv.conf | grep ^nameserver | tr -s [:space:] ' ' | cut -f2 -d ' ' -s) | |
if [[ ! -z "$dns_upstream_ip" ]]; then | |
printf 'CONFIG.DNS_UPSTREAM_IP = "'${dns_upstream_ip}'"\n' >> nmostesting/UserConfig.py | |
fi | |
sudo cp /etc/resolv.conf /etc/resolv.conf.bak | |
echo -e "nameserver ${{ env.HOST_IP_ADDRESS }}" | sudo tee /etc/resolv.conf > /dev/null | |
else | |
echo "Unicast DNS-SD testing not yet supported on ${{ runner.os }}" && false | |
fi | |
fi | |
if [[ "${{ runner.os }}" == "Linux" ]]; then | |
if [[ "${{ matrix.install_mdns }}" == "true" ]]; then | |
echo "Restarting mdnsd" | |
sudo /etc/init.d/mdns start | |
#sudo /usr/sbin/mdnsd -debug & | |
sleep 2 | |
dns-sd -V | |
else | |
echo "Restarting avahi-daemon" | |
sudo systemctl start avahi-daemon | |
sleep 2 | |
ps -e | grep avahi-daemon | |
avahi-daemon -V | |
fi | |
fi | |
if [[ "${{ runner.os }}" == "Linux" && "${{ matrix.install_mdns }}" == "false" ]]; then | |
# nmos-cpp-node doesn't currently support advertising hostnames to Avahi | |
avahi-publish -a -R nmos-api.local ${{ env.HOST_IP_ADDRESS }} & | |
fi | |
${{ env.GITHUB_WORKSPACE_BASH }}/Sandbox/run_nmos_testing.sh "$run_python" ${domain} ${root_dir}/build/nmos-cpp-node ${root_dir}/build/nmos-cpp-registry results badges $GITHUB_STEP_SUMMARY ${{ env.HOST_IP_ADDRESS }} "${{ env.GITHUB_COMMIT }}-${{ env.BUILD_NAME }}-" | |
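# run_nmos_testing.sh (in this repository's Sandbox directory) is invoked with, in order: the python
# launcher, the DNS-SD domain, the nmos-cpp-node and nmos-cpp-registry binaries, the results and badges
# output directories, the job summary file, the host IP address, and a prefix for the result file names
# (argument meanings inferred from the call above)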
if [[ "${{ runner.os }}" == "Linux" ]]; then | |
if [[ "${{ matrix.install_mdns }}" == "true" ]]; then | |
ps -e | grep mdnsd || true | |
else | |
ps -e | grep avahi-daemon || true | |
fi | |
fi | |
if [[ "${{ matrix.dns_sd_mode }}" == "unicast" ]]; then | |
if [[ "${{ runner.os }}" == "Linux" ]]; then | |
# restore DNS Server | |
if [[ "${{ matrix.install_mdns }}" == "true" ]]; then | |
echo "Stopping mdnsd" | |
sudo /etc/init.d/mdns stop | |
else | |
echo "Stopping avahi-daemon" | |
sudo systemctl stop avahi-daemon | |
fi | |
cat /etc/resolv.conf.bak | sudo tee /etc/resolv.conf > /dev/null | |
if [[ "${{ matrix.install_mdns }}" == "true" ]]; then | |
echo "Restarting mdnsd" | |
sudo /etc/init.d/mdns start | |
else | |
echo "Restarting avahi-daemon" | |
sudo systemctl start avahi-daemon | |
fi | |
fi | |
fi | |
if [[ "${{ env.DOCKER_TEST_SUITE }}" == "true" ]]; then | |
docker stop nmos_testing | |
docker rm nmos_testing | |
fi | |
if [[ "${{ env.VAGRANT_TEST_SUITE }}" == "true" ]]; then | |
vagrant scp :results/* results/ | |
vagrant destroy -f | |
fi | |
exit 0 | |
- name: upload to google sheets | |
if: github.ref == 'refs/heads/master' && github.event_name == 'push' | |
working-directory: ${{ env.RUNNER_WORKSPACE }} | |
shell: bash | |
run: | | |
export SHEET=https://docs.google.com/spreadsheets/d/${{ env.SECRET_RESULTS_SHEET_ID }} | |
python nmos-testing/utilities/run-test-suites/gsheetsImport/resultsImporter.py --credentials ${{ env.GDRIVE_CREDENTIALS }} --sheet "$SHEET" --insert --json nmos-testing/results/${{ env.GITHUB_COMMIT }}-*.json || echo "upload failed" | |
- uses: actions/[email protected] | |
with: | |
name: ${{ env.BUILD_NAME }}_badges | |
path: ${{ runner.workspace }}/nmos-testing/badges | |
- uses: actions/[email protected] | |
with: | |
name: ${{ env.BUILD_NAME }}_results | |
path: ${{ runner.workspace }}/nmos-testing/results | |
build_and_test_ubuntu_14: | |
name: '${{ matrix.os }}: build and test (install mdns: ${{ matrix.install_mdns }}, use conan: ${{ matrix.use_conan }}, force cpprest asio: ${{ matrix.force_cpprest_asio }}, dns-sd mode: ${{ matrix.dns_sd_mode}}, enable_authorization: ${{ matrix.enable_authorization }})' | |
runs-on: ubuntu-20.04 | |
container: | |
image: ubuntu:14.04 | |
strategy: | |
fail-fast: false | |
matrix: | |
os: [ubuntu-14.04] | |
install_mdns: [true] | |
use_conan: [false] | |
force_cpprest_asio: [false] | |
dns_sd_mode: [multicast] | |
enable_authorization: [true] | |
steps: | |
- uses: taiki-e/checkout-action@v1 | |
- name: set environment variables | |
shell: bash | |
run: | | |
if [[ "${{ runner.os }}" == "Linux" ]]; then | |
if [[ "${{ matrix.install_mdns }}" == "true" ]]; then | |
echo "BUILD_NAME=${{ matrix.os }}_mdns_${{ matrix.dns_sd_mode }}" >> $GITHUB_ENV | |
else | |
echo "BUILD_NAME=${{ matrix.os }}_avahi_${{ matrix.dns_sd_mode }}" >> $GITHUB_ENV | |
fi | |
elif [[ "${{ matrix.force_cpprest_asio }}" == "true" ]]; then | |
echo "BUILD_NAME=${{ matrix.os }}_asio" >> $GITHUB_ENV | |
else | |
echo "BUILD_NAME=${{ matrix.os }}" >> $GITHUB_ENV | |
fi | |
GITHUB_COMMIT=`echo "${{ github.sha }}" | cut -c1-7` | |
echo "GITHUB_COMMIT=$GITHUB_COMMIT" >> $GITHUB_ENV | |
# github.workspace points to the host path, not the Docker path; the home directory defaults to the workspace directory
echo "GITHUB_WORKSPACE=`pwd`" >> $GITHUB_ENV | |
cd .. | |
echo "RUNNER_WORKSPACE=`pwd`" >> $GITHUB_ENV | |
- name: install build tools | |
shell: bash | |
run: | | |
apt-get update -q | |
apt-get install -y software-properties-common | |
apt-get --allow-unauthenticated update -q | |
apt-get --allow-unauthenticated install -y curl g++ git make patch zlib1g-dev libssl-dev bsdmainutils dnsutils unzip | |
# ubuntu-14.04 ca-certificates are out of date | |
git config --global http.sslVerify false | |
# build and install openssl | |
curl -OsSkL https://openssl.org/source/old/1.1.1/openssl-1.1.1v.tar.gz | |
tar xzf openssl-1.1.1v.tar.gz | |
cd openssl-1.1.1v | |
./config --prefix=/usr/local/custom-openssl --libdir=lib --openssldir=/etc/ssl | |
make -j1 depend | |
make -j8 | |
make install_sw | |
cd .. | |
# install ffi.h, which is required for the Python build
apt-get install -y libffi-dev
# build and install python | |
curl -sSk https://www.python.org/ftp/python/3.11.5/Python-3.11.5.tar.xz | tar -xJ | |
cd Python-3.11.5 | |
./configure -C --with-openssl=/usr/local/custom-openssl --with-openssl-rpath=auto | |
make -j8 | |
make install | |
update-alternatives --install /usr/bin/python3 python3 /usr/local/bin/python3.11 3 | |
ln -s /usr/local/bin/python3.11 /usr/bin/python | |
curl -sS https://bootstrap.pypa.io/pip/3.6/get-pip.py | python | |
curl -sS https://nodejs.org/dist/v12.16.2/node-v12.16.2-linux-x64.tar.xz | tar -xJ | |
echo "`pwd`/node-v12.16.2-linux-x64/bin" >> $GITHUB_PATH | |
echo "${{ matrix.os }}" | |
if [[ "${{ matrix.os }}" == "ubuntu-14.04" ]]; then | |
# Install CMake | |
curl -L http://www.cmake.org/files/v3.24/cmake-3.24.2.tar.gz -s | tar -xvzf -
cd cmake-3.24.2/ | |
./configure | |
make | |
make install | |
sudo update-alternatives --install /usr/bin/cmake cmake /usr/local/bin/cmake 1 --force | |
# Install ninja | |
curl -L https://github.com/ninja-build/ninja/releases/download/v1.12.1/ninja-linux.zip -s -o ninja-linux.zip
sudo unzip ninja-linux.zip -d /usr/local/bin/ | |
sudo update-alternatives --install /usr/bin/ninja ninja /usr/local/bin/ninja 1 --force | |
fi | |
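# to confirm the bootstrapped toolchain is picked up ahead of any distro packages, one could run
# (illustrative checks only):
#   cmake --version
#   ninja --version
#   python3 --version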
- name: setup google credentials | |
if: env.SECRET_GOOGLE_CREDENTIALS | |
shell: bash | |
working-directory: ${{ env.GITHUB_WORKSPACE }} | |
run: | | |
mkdir -p gdrive | |
echo "${{ env.SECRET_GOOGLE_CREDENTIALS }}" | openssl base64 -d -A -out gdrive/credentials.json | |
echo "GDRIVE_CREDENTIALS=`pwd`/gdrive/credentials.json" >> $GITHUB_ENV | |
- name: install conan | |
if: matrix.use_conan == true | |
run: | | |
pip install conan~=2.4.1 | |
- name: install cmake | |
if: matrix.os != 'ubuntu-14.04' | |
uses: lukka/[email protected] | |
- name: setup bash path | |
working-directory: ${{ env.GITHUB_WORKSPACE }} | |
shell: bash | |
run: | | |
# translate GITHUB_WORKSPACE into a bash path from a Windows path
workspace_dir=`pwd` | |
echo "GITHUB_WORKSPACE_BASH=${workspace_dir}" >> $GITHUB_ENV | |
- name: windows setup | |
if: runner.os == 'Windows' | |
run: | | |
# set compiler to cl.exe to avoid building with gcc. | |
echo "CMAKE_COMPILER_ARGS=-DCMAKE_C_COMPILER=cl.exe -DCMAKE_CXX_COMPILER=cl.exe" >> $env:GITHUB_ENV | |
# disable unused network interface | |
netsh interface set interface name="vEthernet (nat)" admin=DISABLED | |
# get host IP address | |
$env:hostip = ( | |
Get-NetIPConfiguration | | |
Where-Object { | |
$_.IPv4DefaultGateway -ne $null -and | |
$_.NetAdapter.Status -ne "Disconnected" | |
} | |
).IPv4Address.IPAddress | |
echo "HOST_IP_ADDRESS=$env:hostip" >> $env:GITHUB_ENV | |
ipconfig | |
# add the CRL Distribution Point to hosts so that it's discoverable when running the AMWA test suite in mDNS mode
# and to avoid the SSL error WINHTTP_CALLBACK_STATUS_FLAG_CERT_REV_FAILED (failed to check revocation status)
Add-Content $env:WINDIR\System32\Drivers\Etc\Hosts "`n$env:hostip crl.testsuite.nmos.tv`n" | |
# add the OCSP server to hosts so that it's discoverable when running the AMWA test suite in mDNS mode | |
Add-Content $env:WINDIR\System32\Drivers\Etc\Hosts "`n$env:hostip ocsp.testsuite.nmos.tv`n" | |
# add nmos-api.local to hosts to work around mDNS lookups on Windows being very slow and causing the AMWA test suite to take 2-3 hours to complete
Add-Content $env:WINDIR\System32\Drivers\Etc\Hosts "`n$env:hostip nmos-api.local`n" | |
# add nmos-mocks.local to hosts to work around mDNS lookups on Windows being very slow and causing the AMWA test suite IS-04-01 test_05 to fail due to latency messing up the apparent heartbeat interval
Add-Content $env:WINDIR\System32\Drivers\Etc\Hosts "`n$env:hostip nmos-mocks.local`n" | |
# Configure SCHANNEL, e.g. to disable TLS 1.0 and TLS 1.1 | |
reg import ${{ env.GITHUB_WORKSPACE }}/Sandbox/configure_schannel.reg | |
- name: windows install bonjour | |
if: runner.os == 'Windows' | |
run: | | |
# download bonjour installer | |
curl -L https://download.info.apple.com/Mac_OS_X/061-8098.20100603.gthyu/BonjourPSSetup.exe -o BonjourPSSetup.exe -q | |
& 7z.exe e BonjourPSSetup.exe Bonjour64.msi -y | |
msiexec /i ${{ env.GITHUB_WORKSPACE }}\Bonjour64.msi /qn /norestart | |
- name: mac setup | |
if: runner.os == 'macOS' | |
run: | | |
hostip=$(ipconfig getifaddr en0) | |
echo "HOST_IP_ADDRESS=$hostip" >> $GITHUB_ENV | |
active_xcode_version=`xcode-select -p` | |
echo "SDKROOT=${active_xcode_version}/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk" >> $GITHUB_ENV | |
ifconfig | |
echo "CTEST_EXTRA_ARGS=$CTEST_EXTRA_ARGS -E testMdnsResolveAPIs" >> $GITHUB_ENV | |
echo "CTEST_EXPECTED_FAILURES=$CTEST_EXPECTED_FAILURES -R testMdnsResolveAPIs" >> $GITHUB_ENV | |
# add the CRL Distribution Point and the OCSP server to hosts so that they're discoverable when running the AMWA test suite in mDNS mode
echo -e "$hostip crl.testsuite.nmos.tv\n$hostip ocsp.testsuite.nmos.tv" | sudo tee -a /etc/hosts > /dev/null | |
# testssl.sh needs "timeout" | |
brew install coreutils | |
- name: mac docker install | |
# installs Docker on a macOS runner; GitHub's documentation states Docker is already available, so this shouldn't be necessary
# can be used to run the AMWA test suite, but the test suite doesn't seem to be able to communicate with nodes running on the host
if: false | |
run: | | |
brew install docker docker-compose docker-machine xhyve docker-machine-driver-xhyve | |
sudo chown root:wheel $(brew --prefix)/opt/docker-machine-driver-xhyve/bin/docker-machine-driver-xhyve | |
sudo chmod u+s $(brew --prefix)/opt/docker-machine-driver-xhyve/bin/docker-machine-driver-xhyve | |
mkdir -p /Users/runner/.docker/machine/cache/ | |
# workaround "docker-machine" failing to download boot2docker.iso | |
curl -Lo ~/.docker/machine/cache/boot2docker.iso https://github.com/boot2docker/boot2docker/releases/download/v19.03.5/boot2docker.iso | |
i=0 | |
while ! docker-machine "--github-api-token=${{ secrets.GITHUB_TOKEN }}" create default --driver xhyve; do | |
docker-machine rm -f default | |
sleep 1 | |
i=$(( i + 1 ))
if [[ $i -gt 5 ]]; then | |
exit 1 | |
fi | |
done | |
eval $(docker-machine env default) | |
echo "DOCKER_MACHINE_NAME=$DOCKER_MACHINE_NAME" >> $GITHUB_ENV | |
echo "DOCKER_TLS_VERIFY=$DOCKER_TLS_VERIFY" >> $GITHUB_ENV | |
echo "DOCKER_HOST=$DOCKER_HOST" >> $GITHUB_ENV | |
echo "DOCKER_CERT_PATH=$DOCKER_CERT_PATH" >> $GITHUB_ENV | |
- name: ubuntu setup | |
if: runner.os == 'Linux' | |
run: | | |
sudo ip addr flush dev docker0 || echo "remove docker ip failed" | |
hostip=$(hostname -I | cut -f1 -d' ') | |
echo "HOST_IP_ADDRESS=$hostip" >> $GITHUB_ENV | |
ip address | |
# add the CRL Distribution Point and the OCSP server to hosts so that they're discoverable when running the AMWA test suite in mDNS mode
echo -e "$hostip crl.testsuite.nmos.tv\n$hostip ocsp.testsuite.nmos.tv" | sudo tee -a /etc/hosts > /dev/null | |
# re-synchronize the package index | |
sudo apt-get update -q | |
- name: ubuntu mdns install | |
if: runner.os == 'Linux' && matrix.install_mdns == true | |
run: | | |
cd ${{ env.GITHUB_WORKSPACE }} | |
mkdir mDNSResponder | |
cd mDNSResponder | |
curl -L https://github.com/apple-oss-distributions/mDNSResponder/archive/mDNSResponder-878.200.35.tar.gz -s | tar -xvzf - --strip-components=1 > /dev/null | |
patch -p1 < ${{ env.GITHUB_WORKSPACE }}/Development/third_party/mDNSResponder/unicast.patch | |
patch -p1 < ${{ env.GITHUB_WORKSPACE }}/Development/third_party/mDNSResponder/permit-over-long-service-types.patch | |
patch -p1 < ${{ env.GITHUB_WORKSPACE }}/Development/third_party/mDNSResponder/poll-rather-than-select.patch | |
cd mDNSPosix | |
make os=linux && sudo make os=linux install | |
# install Name Service Cache Daemon to speed up repeated mDNS name discovery | |
sudo apt-get install -f nscd | |
if [ -f /.dockerenv ]; then | |
# nscd doesn't run automatically under docker | |
mkdir -p /var/run/nscd | |
nscd | |
fi | |
# force dependency on mDNSResponder | |
echo "CMAKE_EXTRA_ARGS=${{ env.CMAKE_EXTRA_ARGS }} -DNMOS_CPP_USE_AVAHI:BOOL=\"0\"" >> $GITHUB_ENV | |
- name: ubuntu non-conan setup | |
if: runner.os == 'Linux' && matrix.use_conan == false | |
run: | | |
sudo apt-get install -y \ | |
libboost-chrono-dev \ | |
libboost-date-time-dev \ | |
libboost-regex-dev \ | |
libboost-system-dev \ | |
libboost-thread-dev \ | |
libboost-random-dev \ | |
libboost-filesystem-dev \ | |
openssl \ | |
libssl-dev | |
cd ${{ env.RUNNER_WORKSPACE }} | |
git clone --recurse-submodules --branch v2.10.19 https://github.com/Microsoft/cpprestsdk | |
cd cpprestsdk/Release | |
mkdir build | |
cd build | |
cmake .. -DCMAKE_BUILD_TYPE:STRING="Release" -DWERROR:BOOL="0" -DBUILD_SAMPLES:BOOL="0" -DBUILD_TESTS:BOOL="0" | |
make -j 2 && sudo make install | |
echo "CMAKE_EXTRA_ARGS=${{ env.CMAKE_EXTRA_ARGS }}" \ | |
"-DWEBSOCKETPP_INCLUDE_DIR:PATH=\"${{ env.RUNNER_WORKSPACE }}/cpprestsdk/Release/libs/websocketpp\"" \ | |
"-DNMOS_CPP_USE_SUPPLIED_JSON_SCHEMA_VALIDATOR:BOOL=\"1\"" \ | |
"-DNMOS_CPP_USE_SUPPLIED_JWT_CPP:BOOL=\"1\"" \ | |
>> $GITHUB_ENV | |
- name: ubuntu avahi setup | |
if: runner.os == 'Linux' && matrix.install_mdns == false | |
run: | | |
sudo apt-get install -f libavahi-compat-libdnssd-dev libnss-mdns avahi-utils | |
echo "CTEST_EXTRA_ARGS=$CTEST_EXTRA_ARGS -E testMdnsAdvertiseAddress" >> $GITHUB_ENV | |
echo "CTEST_EXPECTED_FAILURES=$CTEST_EXPECTED_FAILURES -R testMdnsAdvertiseAddress" >> $GITHUB_ENV | |
# make Avahi respond only on the "eth0" interface
sudo sed -i 's/#*allow-interfaces=.*/allow-interfaces=eth0/g' /etc/avahi/avahi-daemon.conf | |
sudo systemctl restart avahi-daemon | |
# install Name Service Cache Daemon to speed up repeated mDNS name discovery | |
sudo apt-get install -f nscd | |
# force dependency on avahi | |
echo "CMAKE_EXTRA_ARGS=${{ env.CMAKE_EXTRA_ARGS }} -DNMOS_CPP_USE_AVAHI:BOOL=\"1\"" >> $GITHUB_ENV | |
- name: force cpprest asio | |
if: matrix.force_cpprest_asio == true && matrix.use_conan == true | |
shell: bash | |
run: | | |
echo "CONAN_INSTALL_EXTRA_ARGS=--options\;cpprestsdk/*:http_client_impl=asio\;--options\;cpprestsdk/*:http_listener_impl=asio" >> $GITHUB_ENV | |
- name: enable conan | |
if: matrix.use_conan == true | |
shell: bash | |
run: | | |
echo "CMAKE_EXTRA_ARGS=${{ env.CMAKE_EXTRA_ARGS }}" \ | |
"-DCMAKE_PROJECT_TOP_LEVEL_INCLUDES:STRING=\"third_party/cmake/conan_provider.cmake\"" \ | |
"-DCONAN_INSTALL_ARGS:STRING=\"--build=missing\;${{ env.CONAN_INSTALL_EXTRA_ARGS }}\;--lockfile-out=conan.lock\"" \ | |
>> $GITHUB_ENV | |
cat $GITHUB_ENV | |
- name: setup developer command prompt for Microsoft Visual C++ | |
if: runner.os == 'Windows' | |
uses: ilammy/msvc-dev-cmd@v1 | |
- name: build | |
uses: lukka/[email protected] | |
with: | |
cmakeListsOrSettingsJson: CMakeListsTxtAdvanced | |
cmakeListsTxtPath: '${{ env.GITHUB_WORKSPACE }}/Development/CMakeLists.txt' | |
buildDirectory: '${{ env.RUNNER_WORKSPACE }}/build/' | |
cmakeAppendedArgs: '-GNinja -DCMAKE_BUILD_TYPE=Release -DCMAKE_INSTALL_PREFIX="${{ env.RUNNER_WORKSPACE }}/install" ${{ env.CMAKE_COMPILER_ARGS }} ${{ env.CMAKE_EXTRA_ARGS }}' | |
- name: dump conan lockfile | |
if: matrix.use_conan == true | |
run: | | |
cat ${{ env.RUNNER_WORKSPACE }}/build/conan.lock | |
- name: unit test | |
run: | | |
cd ${{ env.RUNNER_WORKSPACE }}/build/ | |
ctest --output-on-failure ${{ env.CTEST_EXTRA_ARGS }} | |
- name: expected unit test failures | |
if: env.CTEST_EXPECTED_FAILURES != null | |
continue-on-error: true | |
run: | | |
cd ${{ env.RUNNER_WORKSPACE }}/build/ | |
ctest --output-on-failure ${{ env.CTEST_EXPECTED_FAILURES }} | |
- name: install | |
uses: lukka/[email protected] | |
with: | |
cmakeListsOrSettingsJson: CMakeListsTxtAdvanced | |
cmakeListsTxtPath: '${{ env.GITHUB_WORKSPACE }}/Development/CMakeLists.txt' | |
buildDirectory: '${{ env.RUNNER_WORKSPACE }}/build/' | |
buildWithCMakeArgs: '--target install' | |
- name: set install test environment variable | |
shell: bash | |
run: | | |
# replace backslashes with forward slashes on Windows | |
echo "CMAKE_WORKSPACE=${RUNNER_WORKSPACE//\\/\/}" >> $GITHUB_ENV | |
- name: install test | |
if: runner.os != 'macOS' | |
uses: lukka/[email protected] | |
with: | |
cmakeListsOrSettingsJson: CMakeListsTxtAdvanced | |
cmakeListsTxtPath: '${{ env.GITHUB_WORKSPACE }}/Sandbox/my-nmos-node/CMakeLists.txt' | |
buildDirectory: '${{ env.RUNNER_WORKSPACE }}/build-my-nmos-node/' | |
cmakeAppendedArgs: '-GNinja | |
-DCMAKE_BUILD_TYPE=Release | |
-DCMAKE_FIND_PACKAGE_PREFER_CONFIG="1" | |
-DCMAKE_MODULE_PATH="${{ env.CMAKE_WORKSPACE }}/build/conan" | |
-DCMAKE_PREFIX_PATH="${{ env.CMAKE_WORKSPACE }}/install" | |
-DCMAKE_INSTALL_PREFIX="${{ env.CMAKE_WORKSPACE }}/build/conan" | |
${{ env.CMAKE_COMPILER_ARGS }}' | |
- name: install test log | |
if: runner.os != 'macOS' | |
run: | | |
# dump the log file created in Sandbox/my-nmos-node/CMakeLists.txt | |
cat ${{ env.RUNNER_WORKSPACE }}/build-my-nmos-node/my-nmos-node_include-release.txt | |
- name: install wsl | |
if: runner.os == 'Windows' | |
run: | | |
& curl -L https://aka.ms/wsl-ubuntu-1804 -o ubuntu-1804.appx | |
Rename-Item .\ubuntu-1804.appx .\ubuntu-1804.zip | |
Expand-Archive .\ubuntu-1804.zip .\ubuntu-1804 | |
cd ubuntu-1804 | |
.\ubuntu1804.exe install --root | |
- name: AMWA test suite | |
shell: bash | |
working-directory: ${{ env.RUNNER_WORKSPACE }} | |
run: | |
| | |
set -x | |
root_dir=`pwd` | |
# Install AMWA NMOS Testing Tool | |
git clone https://github.com/AMWA-TV/nmos-testing.git | |
cd nmos-testing | |
# Configure the Testing Tool so all APIs are tested with TLS and authorization | |
printf "from . import Config as CONFIG\nCONFIG.ENABLE_HTTPS = True\nCONFIG.MOCK_SERVICES_WARM_UP_DELAY = 30\nCONFIG.HTTP_TIMEOUT = 2\n" > nmostesting/UserConfig.py | |
# Set the DNS-SD mode | |
printf 'CONFIG.DNS_SD_MODE = "'${{ matrix.dns_sd_mode }}'"\n' >> nmostesting/UserConfig.py | |
# Set the client JWKS_URI so that the mock Authorization Server can obtain the client JSON Web Key Set (public keys) to verify the client_assertion when the client requests an access token
if [[ "${{ matrix.dns_sd_mode }}" == "multicast" ]]; then | |
hostname=nmos-api.local | |
else | |
hostname=api.testsuite.nmos.tv | |
fi | |
printf 'CONFIG.JWKS_URI = "https://'${hostname}':1080/x-authorization/jwks"\n' >> nmostesting/UserConfig.py | |
if [[ "${{matrix.enable_authorization}}" == "true" ]]; then | |
printf 'CONFIG.ENABLE_AUTH = True\n' >> nmostesting/UserConfig.py | |
else | |
printf 'CONFIG.ENABLE_AUTH = False\n' >> nmostesting/UserConfig.py | |
fi | |
# Download testssl | |
cd testssl | |
curl -L https://github.com/drwetter/testssl.sh/archive/v3.0.7.tar.gz -s | tar -xvzf - --strip-components=1 > /dev/null | |
cd .. | |
# Create output directories | |
mkdir results | |
mkdir badges | |
if [[ "${{ env.DOCKER_TEST_SUITE }}" == "true" ]]; then | |
# run test suite in amwa/nmos-testing docker container | |
docker pull amwa/nmos-testing | |
docker run -d --name "nmos_testing" --entrypoint="/usr/bin/tail" -v `pwd`/results:/home/nmos-testing/results amwa/nmos-testing -f /dev/null | |
run_python="docker exec -i nmos_testing python3" | |
elif [[ "${{ env.VAGRANT_TEST_SUITE }}" == "true" ]]; then | |
# run test suite in vagrant VM | |
cp ${{ env.GITHUB_WORKSPACE_BASH }}/.github/workflows/mac_Vagrantfile ./Vagrantfile | |
vagrant plugin install vagrant-scp | |
vagrant up | |
vagrant ssh -- mkdir results | |
run_python="vagrant ssh -- python3" | |
elif [[ "${{ runner.os }}" == "Linux" && "${{ matrix.dns_sd_mode }}" == "unicast" ]]; then | |
# run test suite directly | |
sudo pip install --upgrade -r requirements.txt | |
# install SDPoker | |
npm install -g git+https://[email protected]/AMWA-TV/sdpoker.git | |
run_python="sudo python" | |
else | |
# run test suite directly | |
pip install -r requirements.txt | |
# install SDPoker | |
npm install -g git+https://[email protected]/AMWA-TV/sdpoker.git | |
run_python="python" | |
fi | |
pip install -r utilities/run-test-suites/gsheetsImport/requirements.txt | |
if [[ "${{ runner.os }}" == "Windows" ]]; then | |
# install certificates | |
certutil -enterprise -addstore -user root test_data\\BCP00301\\ca\\certs\\ca.cert.pem | |
certutil -enterprise -addstore -user ca test_data\\BCP00301\\ca\\intermediate\\certs\\intermediate.cert.pem | |
certutil -importpfx -enterprise test_data\\BCP00301\\ca\\intermediate\\certs\\ecdsa.api.testsuite.nmos.tv.cert.chain.pfx | |
certutil -importpfx -enterprise test_data\\BCP00301\\ca\\intermediate\\certs\\rsa.api.testsuite.nmos.tv.cert.chain.pfx | |
# RSA | |
netsh http add sslcert ipport=0.0.0.0:1080 certhash=021d50df2177c07095485184206ee2297e50b65c appid="{00000000-0000-0000-0000-000000000000}" | |
# ECDSA | |
#netsh http add sslcert ipport=0.0.0.0:1080 certhash=875eca592c49120254b32bb8bed90ac3679015a5 appid="{00000000-0000-0000-0000-000000000000}" | |
# RSA | |
netsh http add sslcert ipport=0.0.0.0:8088 certhash=021d50df2177c07095485184206ee2297e50b65c appid="{00000000-0000-0000-0000-000000000000}" | |
# ECDSA | |
#netsh http add sslcert ipport=0.0.0.0:8088 certhash=875eca592c49120254b32bb8bed90ac3679015a5 appid="{00000000-0000-0000-0000-000000000000}" | |
fi | |
if [[ "${{ runner.os }}" == "macOS" ]]; then | |
# force DNS lookups to IPv4 as mDNS lookups on macOS seem to wait for the IPv6 lookup to time out before returning the IPv4 result
mv nmostesting/GenericTest.py nmostesting/GenericTest.py.old | |
printf 'import socket\nold_getaddrinfo = socket.getaddrinfo\ndef new_getaddrinfo(host, port, family=0, type=0, proto=0, flags=0):\n return old_getaddrinfo(host, port, socket.AF_INET, type, proto, flags)\nsocket.getaddrinfo = new_getaddrinfo\n' > nmostesting/GenericTest.py | |
cat nmostesting/GenericTest.py.old >> nmostesting/GenericTest.py | |
fi | |
if [[ "${{ runner.os }}" == "Linux" && "${{ matrix.use_conan }}" == "false" ]]; then | |
# the Ubuntu 14.04 non-Conan build uses Boost 1.54.0, which doesn't support disabling TLS 1.1
mkdir -p ${{ env.GITHUB_WORKSPACE_BASH }}/Sandbox/nmos-testing-options/ | |
echo "--ignore test_01" > ${{ env.GITHUB_WORKSPACE_BASH }}/Sandbox/nmos-testing-options/BCP-003-01.txt | |
echo "1" > ${{ env.GITHUB_WORKSPACE_BASH }}/Sandbox/nmos-testing-options/BCP-003-01_max_disabled.txt | |
fi | |
if [[ "${{ runner.os }}" == "Linux" ]]; then | |
if [[ "${{ matrix.install_mdns }}" == "true" ]]; then | |
echo "Stopping mdnsd" | |
sudo /etc/init.d/mdns stop | |
else | |
echo "Stopping avahi-daemon" | |
sudo systemctl stop avahi-daemon | |
fi | |
fi | |
if [[ "${{ matrix.dns_sd_mode }}" == "multicast" ]]; then | |
domain=local | |
else | |
domain=testsuite.nmos.tv | |
if [[ "${{ runner.os }}" == "Linux" ]]; then | |
# add host names | |
echo -e "${{ env.HOST_IP_ADDRESS }} api.$domain\n${{ env.HOST_IP_ADDRESS }} mocks.$domain" | sudo tee -a /etc/hosts > /dev/null | |
# force testing tool to cache specification repos before changing the resolver configuration | |
$run_python nmos-test.py suite IS-04-01 --selection auto --host ${{ env.HOST_IP_ADDRESS }} --port 444 --version v1.3 || true | |
# change the resolver configuration to use only the testing tool's mock DNS server | |
# and instead configure the mock DNS server to use an upstream DNS server | |
# as unicast DNS-SD test results are inconsistent if other servers are also configured | |
dns_upstream_ip=$(cat /etc/resolv.conf | grep ^nameserver | tr -s [:space:] ' ' | cut -f2 -d ' ' -s) | |
if [[ ! -z "$dns_upstream_ip" ]]; then | |
printf 'CONFIG.DNS_UPSTREAM_IP = "'${dns_upstream_ip}'"\n' >> nmostesting/UserConfig.py | |
fi | |
sudo cp /etc/resolv.conf /etc/resolv.conf.bak | |
echo -e "nameserver ${{ env.HOST_IP_ADDRESS }}" | sudo tee /etc/resolv.conf > /dev/null | |
else | |
echo "Unicast DNS-SD testing not yet supported on ${{ runner.os }}" && false | |
fi | |
fi | |
if [[ "${{ runner.os }}" == "Linux" ]]; then | |
if [[ "${{ matrix.install_mdns }}" == "true" ]]; then | |
echo "Restarting mdnsd" | |
sudo /etc/init.d/mdns start | |
#sudo /usr/sbin/mdnsd -debug & | |
sleep 2 | |
dns-sd -V | |
else | |
echo "Restarting avahi-daemon" | |
sudo systemctl start avahi-daemon | |
sleep 2 | |
ps -e | grep avahi-daemon | |
avahi-daemon -V | |
fi | |
fi | |
if [[ "${{ runner.os }}" == "Linux" && "${{ matrix.install_mdns }}" == "false" ]]; then | |
# nmos-cpp-node doesn't currently support advertising hostnames to Avahi | |
avahi-publish -a -R nmos-api.local ${{ env.HOST_IP_ADDRESS }} & | |
fi | |
${{ env.GITHUB_WORKSPACE_BASH }}/Sandbox/run_nmos_testing.sh "$run_python" ${domain} ${root_dir}/build/nmos-cpp-node ${root_dir}/build/nmos-cpp-registry results badges $GITHUB_STEP_SUMMARY ${{ env.HOST_IP_ADDRESS }} "${{ env.GITHUB_COMMIT }}-${{ env.BUILD_NAME }}-" | |
if [[ "${{ runner.os }}" == "Linux" ]]; then | |
if [[ "${{ matrix.install_mdns }}" == "true" ]]; then | |
ps -e | grep mdnsd || true | |
else | |
ps -e | grep avahi-daemon || true | |
fi | |
fi | |
if [[ "${{ matrix.dns_sd_mode }}" == "unicast" ]]; then | |
if [[ "${{ runner.os }}" == "Linux" ]]; then | |
# restore DNS Server | |
if [[ "${{ matrix.install_mdns }}" == "true" ]]; then | |
echo "Stopping mdnsd" | |
sudo /etc/init.d/mdns stop | |
else | |
echo "Stopping avahi-daemon" | |
sudo systemctl stop avahi-daemon | |
fi | |
cat /etc/resolv.conf.bak | sudo tee /etc/resolv.conf > /dev/null | |
if [[ "${{ matrix.install_mdns }}" == "true" ]]; then | |
echo "Restarting mdnsd" | |
sudo /etc/init.d/mdns start | |
else | |
echo "Restarting avahi-daemon" | |
sudo systemctl start avahi-daemon | |
fi | |
fi | |
fi | |
if [[ "${{ env.DOCKER_TEST_SUITE }}" == "true" ]]; then | |
docker stop nmos_testing | |
docker rm nmos_testing | |
fi | |
if [[ "${{ env.VAGRANT_TEST_SUITE }}" == "true" ]]; then | |
vagrant scp :results/* results/ | |
vagrant destroy -f | |
fi | |
exit 0 | |
- name: upload to google sheets | |
if: github.ref == 'refs/heads/master' && github.event_name == 'push' | |
working-directory: ${{ env.RUNNER_WORKSPACE }} | |
shell: bash | |
run: | | |
export SHEET=https://docs.google.com/spreadsheets/d/${{ env.SECRET_RESULTS_SHEET_ID }} | |
python nmos-testing/utilities/run-test-suites/gsheetsImport/resultsImporter.py --credentials ${{ env.GDRIVE_CREDENTIALS }} --sheet "$SHEET" --insert --json nmos-testing/results/${{ env.GITHUB_COMMIT }}-*.json || echo "upload failed" | |
- uses: actions/[email protected] | |
with: | |
name: ${{ env.BUILD_NAME }}_badges | |
path: ${{ runner.workspace }}/nmos-testing/badges | |
- uses: actions/[email protected] | |
with: | |
name: ${{ env.BUILD_NAME }}_results | |
path: ${{ runner.workspace }}/nmos-testing/results | |
make_badges: | |
if: github.ref == 'refs/heads/master' && github.event_name == 'push' | |
needs: [build_and_test, build_and_test_ubuntu_14] | |
runs-on: ubuntu-20.04 | |
steps: | |
- uses: actions/checkout@v3 | |
- name: set environment variables | |
shell: bash | |
run: | | |
GITHUB_COMMIT=`echo "${{ github.sha }}" | cut -c1-7` | |
echo "GITHUB_COMMIT=$GITHUB_COMMIT" >> $GITHUB_ENV | |
echo "GITHUB_WORKSPACE=${{ github.workspace }}" >> $GITHUB_ENV | |
echo "RUNNER_WORKSPACE=${{ runner.workspace }}" >> $GITHUB_ENV | |
- uses: actions/[email protected] | |
with: | |
path: ${{ runner.workspace }}/artifacts | |
- name: make badges | |
run: | | |
# combine badges from all builds, exclude macos-12 | |
${{ github.workspace }}/Sandbox/make_badges.sh ${{ github.workspace }} ${{ runner.workspace }}/artifacts macos-12_auth macos-12_noauth | |
# force push to GitHub onto an orphan 'badges' branch
cd ${{ github.workspace }} | |
git checkout --orphan badges-${{ env.GITHUB_COMMIT }} | |
git rm -rfq --ignore-unmatch . | |
git add *.svg | |
git remote set-url --push `git remote` https://x-access-token:${{ secrets.GITHUB_TOKEN }}@github.com/${{ github.repository }} | |
git config --global user.name 'test-results-uploader' | |
git config --global user.email '[email protected]' | |
git commit -qm "Badges for README at ${{ env.GITHUB_COMMIT }}" | |
git push -f `git remote` badges-${{ env.GITHUB_COMMIT }}:badges |