diff --git a/.github/README.md b/.github/README.md
new file mode 100644
index 00000000000..54a93d93674
--- /dev/null
+++ b/.github/README.md
@@ -0,0 +1,239 @@
+# Github configuration
+
+This file provides a brief explanation of the organization of rippled's
+Github configuration for actions, workflows, and templates.
+
+## Templates
+
+The `ISSUE_TEMPLATE` folder holds several files used to improve the
+experience of creating an issue.
+* `config.yml` configures external links to XRPL resources that are provided
+  when a user starts the process of creating a new issue. Currently, that
+  includes XRPL documentation and Ripple's bug bounty program.
+* `bug_report.md` is the template used to create bug reports.
+* `feature_request.md` is the template used to create feature requests.
+
+Additionally, `pull_request_template.md` in this folder is the template
+used when opening Pull Requests.
+
+## Workflows
+
+The `workflows` folder holds several files that describe Github Actions
+workflows.
+
+### Documentation
+
+`doxygen.yml` builds and [publishes](http://ripple.github.io/rippled/) the
+rippled developer documentation using `doxygen` whenever a new commit or
+commits are published to the `develop` branch.
+
+### Code formatting
+
+`clang-format.yml` checks that source code is correctly formatted. It runs
+all rippled source files through `clang-format`, and fails if any changes
+are found. If it fails, it publishes a patch file as an artifact that the
+developer can apply to their branch, along with instructions on how to use
+it.
+
+### Levelization checking
+
+`levelization.yml` checks for changes in source code levelization, and
+raises an alert if any changes are found (good or bad). See [the levelization
+documentation](../Builds/levelization/README.md) for more information.
+
+### Continuous integration
+
+For simplicity, the continuous integration tasks are split into three
+workflows: one each for Linux, MacOS, and Windows. Each workflow is
+described by one file.
+
+The workflows are generally built using some or all of the following stages:
+
+1. Build and/or download common dependencies, and store them in a cache
+   intended to be shared by later stages. For example, boost, vcpkg, or the
+   external projects required by our `cmake` configuration.
+2. Build and test "base" configurations. A base configuration is one that
+   builds `rippled` using default settings other than compiler,
+   debug/release, and unity/non-unity.
+3. Use the cache from the base configuration to build and usually test a
+   special case configuration (e.g. Coverage or Reporting mode).
+4. Use the `rippled` "base" artifact to run a non-default test scenario
+   (e.g. manual tests, or IPv6 tests).
+
+Every build and test job, whether base or special case, will store some of
+the intermediate build results (e.g. NIH cache, `ccache` folders, vcpkg
+libraries) in a job-specific cache. Additionally, each build and test job
+will upload two artifacts: the `cmake` logs, and the `rippled` executable
+built by the job.
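+
+For illustration, here is a simplified sketch of the executable-upload step,
+adapted from the actual steps in the workflow files later in this diff (the
+artifact name shown is just one example of the job-specific names used):
+
+```yaml
+# Sketch: mirrors the "rippled executable artifact" step in the Linux
+# workflow. The artifact name encodes job name, compiler, build type, and
+# unity setting, so downstream test-runner jobs can find the right binary.
+- name: rippled executable artifact
+  uses: actions/upload-artifact@v2
+  continue-on-error: true
+  with:
+    name: rippled base gcc-8 Debug unity=ON
+    retention-days: 7
+    if-no-files-found: ignore
+    path: |
+      build/**/rippled
+      !build/**/_INSTALLED_/**
+```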
+
+#### Linux
+
+1. `linux-general.yml` stages:
+   1. Pre-downloads dependencies into "NIH" ("Not Invented Here") caches.
+   2. Builds and tests `rippled` using all combinations of the following:
+      * gcc-8, gcc-9, clang-8, clang-9, and clang-10
+      * Debug and Release
+      * unity and non-unity
+   3. Special cases:
+      a. For clang-8, Debug, unity, uses the cache from the base stage to
+         build and test the following special configurations:
+         * Reporting
+         * Coverage
+      b. For clang-8, Release, unity, uses the cache from the base stage to
+         build and test the following special configurations:
+         * Address sanitizer (asan)
+         * Undefined behavior sanitizer (ubsan)
+         * A thread sanitizer (tsan) job is defined, but disabled, because
+           it currently fails to run.
+      c. For gcc-8, Debug, unity, uses the cache from the base stage to
+         build and test the following special configurations:
+         * Coverage
+         * Non-static
+         * Non-static with shared libraries
+         * Makefile (instead of Ninja)
+         * Minimum cmake version
+         * The separate
+           [`validator-keys`](https://github.com/ripple/validator-keys-tool)
+           application
+   4. Manual tests:
+      a. For gcc-8, Debug, unity, uses the `rippled` artifact from the base
+         job to run all of the manual tests defined in `rippled` except a
+         few that use too much memory to run on Github's hosts.
+      b. For gcc-8, Release, unity, uses the `rippled` artifact from the
+         base job to run the same set of manual tests.
+
+#### MacOS
+
+1. `macos.yml` stages:
+   1. Pre-downloads dependencies into an "NIH" ("Not Invented Here") cache,
+      **and** builds Boost into a separate cache if not already cached. Two
+      caches are used because Boost changes very rarely, and is not modified
+      by the `rippled` build process, so it can be shared across many
+      different jobs without conflict, saving space.
+   2. Builds and tests `rippled` using:
+      * Debug and Release
+   3. There is no stage 3 in this workflow.
+   4. Uses the Debug `rippled` artifact from the base job to run the IPv6
+      tests using the `--unittest-ipv6` command line parameter.
+
+#### Windows
+
+1. `windows.yml` stages:
+   1. Pre-downloads dependencies into an "NIH" ("Not Invented Here") cache,
+      **and** builds vcpkg library dependencies and boost into separate
+      caches if not already cached. Three caches are used because the
+      vcpkg and boost libraries change very rarely, and are not modified
+      by the `rippled` build process, so they can be shared across many
+      different jobs without conflict, avoiding duplication across jobs.
+   2. Builds and tests `rippled` using the following configurations.
+      (Note that MSVC 2019 Debug builds do not run tests by default due to
+      unresolved issues with how that compiler handles `static constexpr
+      char*` variables. The tests can be forced to run by including
+      "ci_run_win" in the git commit message.)
+      * Ninja generator, MSVC 2017, Debug, unity
+      * Ninja generator, MSVC 2017, Release, unity
+      * Ninja generator, MSVC 2019, Debug, unity (without tests)
+      * Ninja generator, MSVC 2019, Release, unity
+      * Visual Studio 2019 generator, MSVC 2017, Debug, non-unity
+      * Visual Studio 2019 generator, MSVC 2017, Release, non-unity
+      * Visual Studio 2019 generator, MSVC 2019, Release, non-unity
+   3. There is no stage 3 in this workflow.
+   4. There is no stage 4 in this workflow.
+
+##### Caching
+
+###### Strategy
+
+[Github Actions caches](https://docs.github.com/en/actions/guides/caching-dependencies-to-speed-up-workflows)
+are immutable once written, and are accessed by explicit key names. Caches
+can be shared across workflows, and partial key names can be matched by
+prefix. A job can also use caches created on its base and default branches,
+and in the parent (forked-from) repository, but not caches from arbitrary
+forks, child (forked-to) repositories, or unrelated branches. Finally,
+caches have a relatively short expiration time (7 days), and a relatively
+small size limit (5 GB).
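+
+To make these properties concrete, here is a minimal sketch (not one of the
+actual steps; the path and key components are simplified) of how
+`actions/cache` is used with a prefix fallback. If no cache matches `key`
+exactly, the first existing cache whose name starts with a `restore-keys`
+prefix seeds the job, and a new cache is written under `key` when the job
+completes successfully:
+
+```yaml
+- name: Build cache
+  uses: actions/cache@v2
+  with:
+    path: _cache
+    # Exact key for this OS + source snapshot.
+    key: ${{ runner.os }}-base-${{ hashFiles('src/**') }}
+    # Fallback: the newest cache with this prefix, e.g. from a previous
+    # commit, seeds the new cache.
+    restore-keys: |
+      ${{ runner.os }}-base-
+```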
+
+The caching policies used by these workflows attempt to take advantage of
+these properties to save as much time as possible when building, while
+minimizing space when feasible. There is almost certainly room for
+improvement.
+
+For example, the `linux-general.yml` workflow downloads the "NIH"
+dependencies into a single cache (per docker image). All of the source and
+git timestamp files for these dependencies are stored in a single folder,
+which is reused across jobs, and *not* duplicated across job-specific
+caches.
+
+Each "base" job stores dependency build output along with `ccache` output.
+This significantly speeds up subsequent builds of the same base job.
+
+Once the base job is done, any "special case" jobs (e.g. Coverage, address
+sanitizer) use their base job's cache to initialize their specific caches.
+This further reduces duplicated effort, speeding up those jobs.
+
+Finally, most caches build their cache key from values in the job
+configuration plus components derived from the hashes of the `cmake`
+configuration, the rippled source code, and the workflow config itself.
+
+To pull it all together with an example, the base job in
+`linux-clang8-release.yml` might have a cache key that looks like this
+(hashes abbreviated for readability and simplicity):
+* `Linux-ecbd-clang-8-Release-ON-base-40b1-5fec-a88b`
+
+Once that job finishes, the "asan" job's cache key would look like:
+* `Linux-ecbd-clang-8-Release-ON-asan-40b1-5fec-a88b`
+  * It would be initialized using
+    `Linux-ecbd-clang-8-Release-ON-base-40b1-5fec-a88b`.
+
+Suppose that once the whole workflow finishes, the developer makes a change
+to the source and pushes a new commit. The new cache key might look like:
+* `Linux-ecbd-clang-8-Release-ON-base-abcd-1234-a88b`
+
+No cache with that key is found, so the lookup falls back to the prefixes:
+* `Linux-ecbd-clang-8-Release-ON-base-abcd-1234`
+* `Linux-ecbd-clang-8-Release-ON-base-abcd`
+* `Linux-ecbd-clang-8-Release-ON-base`
+
+That last prefix matches the cache from the previous run
+(`Linux-ecbd-clang-8-Release-ON-base-40b1-5fec-a88b`), which is used to
+initialize the new cache. Chances are that most of that cache will be
+useful to the new build, and will cut the build time significantly.
+
+Once the base job finishes, the "asan" job's cache key would be:
+* `Linux-ecbd-clang-8-Release-ON-asan-abcd-1234-a88b`
+and would initialize from the just-finished
+`Linux-ecbd-clang-8-Release-ON-base-abcd-1234-a88b`.
+
+The key components are organized in the following order:
+* Operating system: Caches are not usable across OSes.
+* Hash of the `cmake` config: Any change to the `cmake` config can
+  significantly change the way the code, the dependencies, and the
+  dependency folders are organized, which would render the caches
+  incompatible. To be safe, caches with different `cmake` configs are
+  never reused.
+  * Additionally, this hash includes the file
+    `.github/workflows/cache-invalidate.txt`. This file can be manually
+    changed to force new builds to start with fresh caches in case some
+    unforeseen change causes the build to fail with a reused cache.
+* Compiler
+* Build type (Debug/Release)
+* Unity flag
+* Job name
+* Hash of all the header files under `src`: Because a change to one header
+  file is likely to affect many different object files, a change here
+  invalidates more of the cache.
+* Hash of all the source files under `src`, including headers: Changing any
+  source file generates a new build, but with the same header hash, a build
+  cache that is likely to be very similar can be reused.
+* Hash of the workflow instructions (the yml file, and the
+  `build-action/action.yml` if appropriate): If the workflow is changed
+  without changing any of the source, a new cache may be needed, but it can
+  be seeded with a previous build of the same source code.
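+
+As a concrete sketch, the `Build cache` step in the Linux workflow (shown
+in full later in this diff) assembles the key from those components in
+order, using hash outputs computed by an earlier job:
+
+```yaml
+key: ${{ runner.os }}-${{ needs.linux_cache.outputs.cmakeconfig }}-${{ matrix.compiler.CC }}-${{ matrix.BUILD_TYPE }}-${{ matrix.unity }}-${{ matrix.name }}-${{ needs.linux_cache.outputs.headers }}-${{ needs.linux_cache.outputs.source }}-${{ needs.linux_cache.outputs.instructions }}
+```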
+
+## Action
+
+The `build-action` folder holds an `action.yml` that is used by all of the
+Linux workflows to do the actual build without a lot of duplication.
+Unfortunately, not all types of commands can be used in an action, so there
+is still some boilerplate required in each job: checkout, cache, artifacts,
+plus any steps that get displayed in a separate section in the Github UI.
\ No newline at end of file
diff --git a/.github/build-action/action.yml b/.github/build-action/action.yml
new file mode 100644
index 00000000000..7c85d0ee6f4
--- /dev/null
+++ b/.github/build-action/action.yml
@@ -0,0 +1,82 @@
+name: 'Build and Test'
+description: 'Build and Test rippled or an alternate target'
+# Pre-condition: The environment has all the necessary base
+# build variables set
+# WARNING: This cache cannot be used to build with a different compiler.
+runs:
+  using: "composite"
+  steps:
+    - name: Dump environment
+      shell: bash
+      run: |
+        env
+        set | grep =
+
+    - name: Install ccache
+      shell: bash
+      run: |
+        if ! type -a ccache
+        then
+          apt install ccache
+        fi
+
+    - name: Reset ccache stats
+      shell: bash
+      run: |
+        ccache -s
+        # Reset the stats so only this build counts
+        ccache -z
+
+    - name: Disk space
+      shell: bash
+      run: |
+        df -h
+        if [[ -d "${CACHE_DIR}" ]]
+        then
+          du -sh ${CACHE_DIR}
+        fi
+        if [[ -d "${CCACHE_DIR}" ]]
+        then
+          du -sh ${CCACHE_DIR}
+        fi
+        find ${NIH_SRC_PATH} ${NIH_CACHE_ROOT} -maxdepth 2 \
+          -type d -exec du -sh {} \; || true
+
+    - name: Build and test
+      shell: bash
+      run: |
+        export NUM_PROCESSORS=${NUM_PROCESSORS:-$( nproc )}
+        echo NUM_PROCESSORS is ${NUM_PROCESSORS}
+        if [ "${BUILD_TYPE}" == "Release" ]
+        then
+          export CMAKE_EXTRA_ARGS+=" -Dassert=ON"
+        fi
+
+        # Set and/or override some environment variables based on
+        # inputs to the action.
+        if [ "${CMAKE_ADD}" != "" ] ; then export CMAKE_EXTRA_ARGS+=" ${CMAKE_ADD}"; fi
+        # Do the build
+        bin/ci/ubuntu/build-and-test.sh
+
+    - name: Debug output
+      shell: bash
+      run: |
+        if [ -e build ] ; then find build \( -name CMakeOutput.log -o -name CMakeError.log \) -ls -exec cat {} \; ; fi
+        df -h
+        if [[ -d "${CACHE_DIR}" ]]
+        then
+          du -sh ${CACHE_DIR}
+        fi
+        if [[ -d "${CCACHE_DIR}" ]]
+        then
+          du -sh ${CCACHE_DIR}
+        fi
+        if [[ -d "${NIH_CACHE_ROOT}" ]]
+        then
+          find ${NIH_CACHE_ROOT} -maxdepth 3 \( -iname src -prune -o \
+            -type d -exec du -sh {} \; \)
+        fi
+
+    - name: Post-run ccache stats
+      shell: bash
+      run: ccache -s
diff --git a/.github/workflows/cache-invalidate.txt b/.github/workflows/cache-invalidate.txt
new file mode 100644
index 00000000000..8d500dca331
--- /dev/null
+++ b/.github/workflows/cache-invalidate.txt
@@ -0,0 +1,12 @@
+This file is used to generate part of the cache key used in the
+CI-related workflows.
+The contents of this file are ABSOLUTELY MEANINGLESS.
+What matters is that any time the caches need to be forcibly
+invalidated, changing this file is sufficient.
+For convenience, a number is provided at the bottom of the file,
+which can be incremented to keep things simple.
+If there is ever a merge conflict, simply choose the larger number.
+However, to reduce the risk of conflicts, avoid including changes +to this file in any Pull Requests, unless necessary + +1 \ No newline at end of file diff --git a/.github/workflows/linux-general.yml.hold b/.github/workflows/linux-general.yml.hold new file mode 100644 index 00000000000..d2bf1a98131 --- /dev/null +++ b/.github/workflows/linux-general.yml.hold @@ -0,0 +1,713 @@ +# This workflow is out of date, but has useful information that should +# eventually be integrated into the "real" workflow. Leave this file as +# a placeholder until that is done. +# +name: "Linux build" +on: [push, pull_request] + +# The files starting with "linux-" are all interdependent. +# If any significant changes are made, be sure to update them +# to keep them in sync. + +env: + CMAKE_EXTRA_ARGS: "-Dwerr=ON -Dwextra=ON" + NINJA_BUILD: true + CACHE_DIR: ${{ github.workspace }}/_cache + NIH_CACHE_ROOT: ${{ github.workspace }}/_cache/nih_c + NIH_SRC_PATH: ${{ github.workspace }}/_srccache + PARALLEL_TESTS: true + USE_CCACHE: true + CCACHE_BASEDIR: ${{ github.workspace }} + CCACHE_NOHASHDIR: true + CCACHE_DIR: ${{ github.workspace }}/_cache/ccache + +jobs: + linux_cache: + name: Seed linux NIH dependencies ${{ matrix.compiler.image_user }}/${{ matrix.compiler.image_name }}:${{ matrix.compiler.image_tag }} + # The NIH setup puts all the dependency source code in a single, + # shared directory. Build the some of the dependency libraries - + # those which force sub-dependencies to be downloaded. + # Then put all the NIH code in a cache. This accomplishes: + # 1) This cache will be as small as possible, since the source is + # not duplicated. + # 2) This cache will be available to all builds, so they don't all + # have to separately download, for example, NuDB and grpc. + # 3) The time to download is not wasted if a build fails, even + # though a failed build will not push a cache. + # The one downside is different prerequisites are installed on different + # docker images, so there will be one cache per image. + runs-on: rippled-runner-dind + timeout-minutes: 720 + if: ${{ !contains(github.event.head_commit.message, 'ci_run_') || contains(github.event.head_commit.message, 'ci_run_linux') }} + strategy: + matrix: + compiler: + - image_user: "rippleautomation" + image_name: "rippled-ci-builder" + image_tag: "clanglessthan10" + CC: gcc-9 + CXX: g++-9 + + - image_user: "rippleautomation" + image_name: "rippled-ci-builder" + image_tag: "clang10" + CC: clang-10 + CXX: clang++-10 + CMAKE_ADD: " -DBoost_NO_BOOST_CMAKE=ON" + container: + image: "${{ matrix.compiler.image_user }}/${{ matrix.compiler.image_name }}:${{ matrix.compiler.image_tag }}" + outputs: + cmakeconfig: ${{ steps.cachekeys.outputs.cmakeconfig }} + headers: ${{ steps.cachekeys.outputs.headers }} + source: ${{ steps.cachekeys.outputs.source }} + instructions: ${{ steps.cachekeys.outputs.instructions }} + env: + CC: ${{ matrix.compiler.CC }} + CXX: ${{ matrix.compiler.CXX }} + CMAKE_ADD: ${{ matrix.compiler.CMAKE_ADD }} + steps: + - name: Cancel Previous Runs + # https://github.com/styfle/cancel-workflow-action/tree/0.9.0 + # Note this will not cancel jobs triggered by a pull request event + # when the PR is created from a fork, even if for the same PR. + # It works fine within the same fork, and is intended to allow + # multiple pushes during development without ending up with a + # huge backlog of jobs. 
+ if: ${{ !contains(github.event.head_commit.message, 'ci_no_cancel') }} + uses: styfle/cancel-workflow-action@0.9.0 + with: + access_token: ${{ github.token }} + + - name: Checkout + uses: actions/checkout@v2 + + - name: Build cache keys + id: cachekeys + shell: bash + run: | + echo "::set-output name=cmakeconfig::${{ hashfiles('CMakeFiles.txt', + 'Builds/CMake/**', '.github/**/cache-invalidate.txt') }}" + echo "::set-output name=headers::${{ hashfiles('src/**/*.h', + 'src/**/*.hpp', 'src/**/*.ipp') }}" + echo "::set-output name=source::${{ hashfiles('src/**', + '!src/**/*.md', '!src/**/*.uml', '!src/**/*.png', '!src/**/*.php') }}" + echo "::set-output name=instructions::${{ hashfiles( + '.github/**/linux-*.yml', '.github/**/action.yml') }}" + + - name: Source cache + id: srccache + continue-on-error: true + uses: actions/cache@v2 + with: + path: ${{ env.NIH_SRC_PATH }} + # If this cache key exists when the job starts, it will use that + # cache read-only. However, if it does not exist, this process will + # build a fresh cache, and try to write it out. Because this is the + # same cache key used across the Linux jobs, that may cause a + # collision. That collision is safe to ignore. + key: ${{ runner.os }}-nihsrc-${{ steps.cachekeys.outputs.cmakeconfig }}-${{ matrix.compiler.image_tag }}-${{ steps.cachekeys.outputs.instructions }} + restore-keys: | + ${{ runner.os }}-nihsrc-${{ steps.cachekeys.outputs.cmakeconfig }}-${{ matrix.compiler.image_tag }}- + + - name: Download CMake dependencies + if: steps.buildcache.outputs.cache-hit != 'true' || steps.srccache.outputs.cache-hit != 'true' + shell: bash + run: | + cmake --version + + export BUILD_TYPE=Debug + if [ "${CMAKE_ADD}" != "" ]; then + export CMAKE_EXTRA_ARGS+="${CMAKE_ADD}" + fi + + ${CC} --version + ${CXX} --version + dir="build/$( basename ${CXX} ).${BUILD_TYPE}" + mkdir "-pv" "${dir}" + pushd "${dir}" + # Verbose or not, it's good to see the right params + # are being used + set "-x" + # rm is called if the first cmake fails, because the most + # likely cause is cache mismatch or corruption. If that happens + # reset the cache and try again. + cmake -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \ + ${CMAKE_EXTRA_ARGS} ../.. || \ + ( rm -rf ${{ env.CACHE_DIR }} ${{ env.NIH_SRC_PATH }}; \ + cmake -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \ + ${CMAKE_EXTRA_ARGS} ../.. ) + + alldeps=( $( \ + cmake --build . --target help | \ + grep '\.\.\.' | cut -d\ -f2 | grep -v -e '\/' -e '^all' \ + -e install -e clean -e depend -e cache -e docs -e rippled \ + -e xrpl_core -e container -e '^rpm' -e '^dpkg' ) ) + echo "Building dependencies: ${alldeps[@]}" + for dep in help "${alldeps[@]}" + do + echo Building $dep + cmake --build . 
--target $dep --parallel + done + set +x + + popd + + - name: Disk space + shell: bash + continue-on-error: true + run: | + + df -h + du -sh ${CACHE_DIR} || true + du -sh ${CCACHE_DIR} || true + find ${NIH_SRC_PATH} ${NIH_CACHE_ROOT} -maxdepth 2 \ + -type d -exec du -sh {} \; || true + + - name: Debug output + if: ${{ steps.cache.outputs.cache-hit != 'true' || failure() }} + shell: bash + continue-on-error: true + run: | + time df -h + if [ -e build ] ; then + find build \( -name CMakeOutput.log -o -name CMakeError.log \) \ + -ls -exec cat {} \; + fi + + - name: Debug output artifacts + if: ${{ steps.cache.outputs.cache-hit != 'true' || failure() }} + uses: actions/upload-artifact@v2 + continue-on-error: true + with: + name: debug-output NIH dependencies ${{ matrix.compiler.image_name }}-${{ matrix.compiler.image_tag }} + retention-days: 7 + if-no-files-found: ignore + path: | + build/**/CMakeOutput.log + build/**/CMakeError.log + + linux_base: + name: Linux ${{ matrix.compiler.CC }}, ${{ matrix.BUILD_TYPE }}, unity ${{ matrix.unity }}, ${{ matrix.name }} + needs: linux_cache + runs-on: rippled-runner-dind + timeout-minutes: 720 + strategy: + fail-fast: false + matrix: + # Current travis: gcc-8 clang-8 gcc-9 clang-9 + # testing image (2020-01-08) has clang 7,8,9 & gcc 6,7,8,9 + # packaging image (2944b78d22db) has clang 10 & gcc 8 + compiler: + - # Both images have gcc-8, but Coverage build needs 2020-01-08 + # so use that one on all gcc-8 builds, so the cache will + # always work + CC: 'gcc-8' + CXX: 'g++-8' + image_user: "rippleautomation" + image_name: "rippled-ci-builder" + image_tag: "clanglessthan10" + + - CC: 'gcc-9' + CXX: 'g++-9' + image_user: "rippleautomation" + image_name: "rippled-ci-builder" + image_tag: "clanglessthan10" + + - CC: 'clang-8' + CXX: 'clang++-8' + image_user: "rippleautomation" + image_name: "rippled-ci-builder" + image_tag: "clanglessthan10" + + - CC: 'clang-9' + CXX: 'clang++-9' + image_user: "rippleautomation" + image_name: "rippled-ci-builder" + image_tag: "clanglessthan10" + + - # The 2020-01-08 image doesn't have clang-10 + CC: 'clang-10' + CXX: 'clang++-10' + image_user: "rippleautomation" + image_name: "rippled-ci-builder" + image_tag: "clang10" + CMAKE_ADD: " -DBoost_NO_BOOST_CMAKE=ON" + + BUILD_TYPE: [ "Debug", "Release" ] + unity: [ "OFF", "ON" ] + name: [ "base" ] + + container: + image: "${{ matrix.compiler.image_user }}/${{ matrix.compiler.image_name }}:${{ matrix.compiler.image_tag }}" + env: + CC: ${{ matrix.compiler.CC }} + CXX: ${{ matrix.compiler.CXX }} + BUILD_TYPE: ${{ matrix.BUILD_TYPE }} + CMAKE_ADD: "-Dunity=${{ matrix.unity }} ${{ matrix.compiler.CMAKE_ADD}} ${{ matrix.CMAKE_ADD }}" + # Try to ensure at least 2Gb of space is free to run tests + MINTESTAVAIL: 2000000 + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Build cache + continue-on-error: true + uses: actions/cache@v2 + # The cache key is built on the following in order + # 1) OS. + # 2) The hash of all the CMake config files. + # 3) The build compiler + # 4) The build type + # 5) The build unity setting + # 6) The build custom name. + # 7) The hash of all the header files under the source dir. + # 8) The hash of all the code files under the source dir. 
+ # 9) The hash of the files controlling this job + # When restoring, if the matching cache is not found, search for a usable + # cache by chopping pieces off the end of the cache key until a match is + # found, with a special case for the NIH built in the previous job + with: + path: ${{ env.CACHE_DIR }} + key: ${{ runner.os }}-${{ needs.linux_cache.outputs.cmakeconfig }}-${{ matrix.compiler.CC }}-${{ matrix.BUILD_TYPE }}-${{ matrix.unity }}-${{ matrix.name }}-${{ needs.linux_cache.outputs.headers }}-${{ needs.linux_cache.outputs.source }}-${{ needs.linux_cache.outputs.instructions }} + restore-keys: | + ${{ runner.os }}-${{ needs.linux_cache.outputs.cmakeconfig }}-${{ matrix.compiler.CC }}-${{ matrix.BUILD_TYPE }}-${{ matrix.unity }}-${{ matrix.name }}-${{ needs.linux_cache.outputs.headers }}-${{ needs.linux_cache.outputs.source }} + ${{ runner.os }}-${{ needs.linux_cache.outputs.cmakeconfig }}-${{ matrix.compiler.CC }}-${{ matrix.BUILD_TYPE }}-${{ matrix.unity }}-${{ matrix.name }}-${{ needs.linux_cache.outputs.headers }} + ${{ runner.os }}-${{ needs.linux_cache.outputs.cmakeconfig }}-${{ matrix.compiler.CC }}-${{ matrix.BUILD_TYPE }}-${{ matrix.unity }}-${{ matrix.name }} + + - name: Source cache + id: cache + continue-on-error: true + uses: actions/cache@v2 + with: + path: ${{ env.NIH_SRC_PATH}} + key: ${{ runner.os }}-nihsrc-${{ needs.linux_cache.outputs.cmakeconfig }}-${{ matrix.compiler.image_tag }}-${{ needs.linux_cache.outputs.instructions }} + + - name: Dump environment + run: | + env + set | grep = + + - name: Reset ccache stats + shell: bash + run: | + if ! type -a ccache + then + apt install ccache + fi + ccache -s + # Reset the stats so only this build counts + ccache -z + + - name: Disk space before + shell: bash + run: | + df -h + du -sh ${CACHE_DIR} || true + du -sh ${CCACHE_DIR} || true + find ${NIH_SRC_PATH} ${NIH_CACHE_ROOT} -maxdepth 2 \ + -type d -exec du -sh {} \; || true + + - name: Build and Test + id: try1 + continue-on-error: true + uses: ./.github/build-action + + - name: Retry once + id: try2 + continue-on-error: true + if: steps.try1.outcome == 'failure' + uses: ./.github/build-action + + - name: Retry twice + if: steps.try2.outcome == 'failure' + uses: ./.github/build-action + + - name: Unity files + if: failure() + shell: bash + # Unity builds sometimes cause OOM failures. + # When that happens, this may help diagnose which source file + # is the problem. 
+ run: | + find build -iname 'unity_*.cxx' -ls -exec cat {} \; + + - name: Disk space after + shell: bash + if: always() + run: | + df -h + du -sh ${CACHE_DIR} || true + du -sh ${CCACHE_DIR} || true + find ${NIH_SRC_PATH} ${NIH_CACHE_ROOT} -maxdepth 2 \ + -type d -exec du -sh {} \; || true + + - name: Post-run ccache stats + shell: bash + run: ccache -s + + - name: Debug output artifacts + if: always() + uses: actions/upload-artifact@v2 + continue-on-error: true + with: + name: debug-output ${{ matrix.name }} ${{ matrix.compiler.CC }} ${{ matrix.BUILD_TYPE }} unity=${{ matrix.unity }} + retention-days: 7 + if-no-files-found: ignore + path: | + build/**/CMakeOutput.log + build/**/CMakeError.log + + - name: rippled executable artifact + uses: actions/upload-artifact@v2 + continue-on-error: true + with: + name: rippled ${{ matrix.name }} ${{ matrix.compiler.CC }} ${{ matrix.BUILD_TYPE }} unity=${{ matrix.unity }} + retention-days: 7 + if-no-files-found: ignore + path: | + build/**/rippled + !build/**/_INSTALLED_/** + + linux_derived: + name: Linux ${{ matrix.name }}, ${{ matrix.CC }}, ${{ matrix.BUILD_TYPE }}, unity + needs: [ linux_base ] + runs-on: rippled-runner-dind + timeout-minutes: 720 + strategy: + fail-fast: false + matrix: + include: + # All of these builds are special cases, so specify them + # one-by-one + # + # Note that "name" is used in the cache key, which cannot + # contain commas (','). + # + - # Coverage builds + name: "Coverage" + TARGET: coverage_report + SKIP_TESTS: true + NINJA_BUILD: true + CMAKE_ADD: "-Dcoverage=ON" + PARALLEL_TESTS: true + CC: "gcc-8" + CXX: "g++-8" + BUILD_TYPE: Debug + canfail: false + + - # dynamic lib builds + name: "non-static" + NINJA_BUILD: true + CMAKE_ADD: "-Dstatic=OFF" + PARALLEL_TESTS: true + CC: "gcc-8" + CXX: "g++-8" + BUILD_TYPE: Debug + canfail: false + + - name: "non-static BUILD_SHARED_LIBS" + NINJA_BUILD: true + CMAKE_ADD: "-Dstatic=OFF -DBUILD_SHARED_LIBS=ON" + PARALLEL_TESTS: true + CC: "gcc-8" + CXX: "g++-8" + BUILD_TYPE: Debug + canfail: false + + - # makefile + name: "makefile generator" + NINJA_BUILD: false + PARALLEL_TESTS: true + CC: "gcc-8" + CXX: "g++-8" + BUILD_TYPE: Debug + canfail: false + + - # verify build with min version of cmake + name: "min cmake version" + NINJA_BUILD: true + CMAKE_EXE: /opt/local/cmake-3.16/bin/cmake + SKIP_TESTS: true + PARALLEL_TESTS: true + CC: "gcc-8" + CXX: "g++-8" + BUILD_TYPE: Debug + canfail: false + + - # validator keys project as subproj of rippled + name: "validator-keys" + NINJA_BUILD: true + TARGET: validator-keys + CMAKE_ADD: "-Dvalidator_keys=ON" + PARALLEL_TESTS: true + CC: "gcc-8" + CXX: "g++-8" + BUILD_TYPE: Debug + canfail: false + + - # Reporting mode + name: "Reporting" + NINJA_BUILD: true + CMAKE_ADD: "-Dreporting=ON" + TARGET: rippled-reporting + PARALLEL_TESTS: true + CC: "clang-8" + CXX: "clang++-8" + BUILD_TYPE: "Debug" + canfail: false + + - # Coverage builds + name: "Coverage" + NINJA_BUILD: true + CMAKE_ADD: "-Dcoverage=ON" + TARGET: coverage_report + PARALLEL_TESTS: true + CC: "clang-8" + CXX: "clang++-8" + BUILD_TYPE: "Debug" + SKIP_TESTS: true + canfail: false + + - # asan + name: "asan" + ASAN_OPTIONS: "print_stats=true:atexit=true" + CMAKE_ADD: "-Dsan=address" + NINJA_BUILD: true + PARALLEL_TESTS: false + CC: "clang-8" + CXX: "clang++-8" + BUILD_TYPE: "Release" + canfail: false + + - # ubsan + name: "ubsan" + UBSAN_OPTIONS: "print_stacktrace=1:report_error_type=1" + CMAKE_ADD: "-Dsan=undefined" + NINJA_BUILD: true + PARALLEL_TESTS: false + CC: "clang-8" + CXX: 
"clang++-8" + BUILD_TYPE: "Release" + canfail: false + + #- # tsan + # # current tsan failure *might* be related to: + # # https://github.com/google/sanitizers/issues/1104 + # # but we can't get it to run, so leave it disabled for now + # name: "tsan" + # TSAN_OPTIONS: "history_size=3 external_symbolizer_path=/usr/bin/llvm-symbolizer verbosity=1" + # CMAKE_ADD: "-Dsan=thread" + # NINJA_BUILD: true + # PARALLEL_TESTS: false + # CC: "clang-8" + # CXX: "clang++-8" + # BUILD_TYPE: "Release" + # canfail: true + # https://docs.github.com/en/free-pro-team@latest/actions/reference/workflow-syntax-for-github-actions#jobsjob_idcontinue-on-error + continue-on-error: ${{ matrix.canfail }} + container: + # The env object is not accessible from here, so specify directly + image: rippleci/rippled-ci-builder:2020-01-08 + env: + CC: ${{ matrix.CC }} + CXX: ${{ matrix.CXX }} + BUILD_TYPE: ${{ matrix.BUILD_TYPE }} + unity: "ON" + # Need this image because it has the coverage tools + image_user: "rippleautomation" + image_name: "rippled-ci-builder" + image_tag: "clanglessthan10" + CMAKE_ADD: "-Dunity=ON ${{ matrix.CMAKE_ADD }}" + TARGET: ${{ matrix.TARGET }} + SKIP_TESTS: ${{ matrix.SKIP_TESTS }} + NINJA_BUILD: ${{ matrix.NINJA_BUILD }} + CMAKE_EXE: ${{ matrix.CMAKE_EXE }} + PARALLEL_TESTS: ${{ matrix.PARALLEL_TESTS }} + ASAN_OPTIONS: ${{ matrix.ASAN_OPTIONS }} + UBSAN_OPTIONS: ${{ matrix.UBSAN_OPTIONS }} + TSAN_OPTIONS: ${{ matrix.TSAN_OPTIONS }} + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Build cache + continue-on-error: true + uses: actions/cache@v2 + # The cache key is built on the following in order + # 1) OS. + # 2) The hash of all the CMake config files. + # 3) The build compiler + # 4) The build type + # 5) The build unity setting + # 6) The build custom name. + # 7) The hash of all the header files under the source dir. + # 8) The hash of all the code files under the source dir. + # 9) The hash of the files controlling this job + # When restoring, if the matching cache is not found, search for the + # cache from the "base" build, which should have just completed, and + # the NIH built as a fallback, but that should never happen. + with: + path: ${{ env.CACHE_DIR }} + key: ${{ runner.os }}-${{ needs.linux_cache.outputs.cmakeconfig }}-${{ matrix.CC }}-${{ matrix.BUILD_TYPE }}-${{ matrix.unity }}-${{ matrix.name }}-${{ needs.linux_cache.outputs.headers }}-${{ needs.linux_cache.outputs.source }}-${{ needs.linux_cache.outputs.instructions }} + restore-keys: | + ${{ runner.os }}-${{ needs.linux_cache.outputs.cmakeconfig }}-${{ matrix.CC }}-${{ matrix.BUILD_TYPE }}-${{ matrix.unity }}-base-${{ needs.linux_cache.outputs.headers }}-${{ needs.linux_cache.outputs.source }}-${{ needs.linux_cache.outputs.instructions }} + + - name: Source cache + id: cache + continue-on-error: true + uses: actions/cache@v2 + with: + path: ${{ env.NIH_SRC_PATH}} + # Because of the parent job, this cache key should definitely exist, + # but the disclaimer in the parent job still applies. + key: ${{ runner.os }}-nihsrc-${{ needs.linux_cache.outputs.cmakeconfig }}-${{ env.image_tag }}-${{ needs.linux_cache.outputs.instructions }} + restore-keys: | + ${{ runner.os }}-nihsrc-${{ needs.linux_cache.outputs.cmakeconfig }}-${{ env.image_tag }}- + + - name: Reset ccache stats + shell: bash + run: | + if ! 
type -a ccache + then + apt install ccache + fi + ccache -s + # Reset the stats so only this build counts + ccache -z + + - name: Disk space before + shell: bash + continue-on-error: true + run: | + df -h + du -sh ${CACHE_DIR} || true + du -sh ${CCACHE_DIR} || true + find ${NIH_SRC_PATH} ${NIH_CACHE_ROOT} -maxdepth 2 \ + -type d -exec du -sh {} \; || true + + - name: Build and Test + id: try1 + continue-on-error: true + uses: ./.github/build-action + + - name: Retry once + id: try2 + continue-on-error: true + if: steps.try1.outcome == 'failure' + uses: ./.github/build-action + + - name: Retry twice + if: steps.try2.outcome == 'failure' + uses: ./.github/build-action + + - name: Unity files + if: failure() + shell: bash + # Unity builds sometimes cause OOM failures. + # When that happens, this may help diagnose which source file + # is the problem. + run: | + find build -iname 'unity_*.cxx' -ls -exec cat {} \; + + - name: Disk space after + shell: bash + continue-on-error: true + if: always() + run: | + df -h + du -sh ${CACHE_DIR} || true + du -sh ${CCACHE_DIR} || true + find ${NIH_SRC_PATH} ${NIH_CACHE_ROOT} -maxdepth 2 \ + -type d -exec du -sh {} \; || true + + - name: Post-run ccache stats + shell: bash + run: ccache -s + + - name: Debug output artifacts + if: always() + uses: actions/upload-artifact@v2 + continue-on-error: true + with: + name: debug-output ${{ matrix.name }} ${{ matrix.CC }} ${{ matrix.BUILD_TYPE }} unity=${{ env.unity }} + retention-days: 7 + if-no-files-found: ignore + path: | + build/**/CMakeOutput.log + build/**/CMakeError.log + + - name: rippled executable artifact + if: matrix.TARGET != 'validator-keys' + uses: actions/upload-artifact@v2 + continue-on-error: true + with: + name: rippled ${{ matrix.name }} ${{ matrix.CC }} ${{ matrix.BUILD_TYPE }} unity=${{ matrix.unity }} + retention-days: 7 + if-no-files-found: ignore + path: | + build/**/rippled + !build/**/_INSTALLED_/** + + - name: validator-keys executable artifact + if: matrix.TARGET == 'validator-keys' + uses: actions/upload-artifact@v2 + continue-on-error: true + with: + name: validator-keys + retention-days: 7 + if-no-files-found: ignore + path: | + build/**/validator-keys + !build/**/_INSTALLED_/** + !build/**/CMake* + !build/**/cmake* + + linux_manual_test_runner: + # Can't access env in the name, so hard code + name: Linux manual tests gcc-8, ${{ matrix.BUILD_TYPE }}, unity + needs: linux_base + runs-on: rippled-runner-dind + timeout-minutes: 720 + strategy: + fail-fast: false + matrix: + BUILD_TYPE: [ "Debug", "Release" ] + env: + CC: "gcc-8" + CXX: "g++-8" + BUILD_TYPE: ${{ matrix.BUILD_TYPE }} + unity: "ON" + image_user: "rippleautomation" + image_name: "rippled-ci-builder" + image_tag: "clanglessthan10" + steps: + - name: Checkout + uses: actions/checkout@v2 + + - name: Download rippled artifact - ${{ env.CC }} ${{ matrix.BUILD_TYPE }} unity=${{ env.unity }} + uses: actions/download-artifact@v2 + with: + name: rippled base ${{ env.CC }} ${{ matrix.BUILD_TYPE }} unity=${{ env.unity }} + + - name: Run manual tests + shell: bash + env: + verbose: ${{ contains(env.commit_message, 'ci_verbose') }} + run: | + [[ "${verbose}" == "true" ]] && set -x || true + + find -iname rippled -ls + # There can be only one + [[ $( find -name rippled | wc -l ) -eq 1 ]] + APP_PATH="$( find -iname rippled )" + chmod a+rx "${APP_PATH}" + + : ${APP_ARGS:=} + declare -a manual_tests=$( bin/ci/ubuntu/manual-tests.sh "${APP_PATH}" ) + APP_ARGS+=" --unittest=${manual_tests}" + + APP_ARGS+=" --unittest-jobs 
${NUM_PROCESSORS:-$(nproc)}" + + # Verbose or not, it's good to see the right params are being used + echo Run: "${APP_PATH}" ${APP_ARGS} + "${APP_PATH}" ${APP_ARGS} diff --git a/.github/workflows/macos2.yml.hold b/.github/workflows/macos2.yml.hold new file mode 100644 index 00000000000..3406104cc6b --- /dev/null +++ b/.github/workflows/macos2.yml.hold @@ -0,0 +1,453 @@ +# This workflow is out of date, but has useful information that should +# eventually be integrated into the "real" workflow. Leave this file as +# a placeholder until that is done. +# +name: "MacOS build" +on: [push, pull_request] + +env: + NINJA_BUILD: true + CACHE_DIR: ${{ github.workspace }}/_cache + NIH_CACHE_ROOT: ${{ github.workspace }}/_cache/nih_c + NIH_SRC_PATH: ${{ github.workspace }}/_srccache + PARALLEL_TESTS: true + USE_CCACHE: true + CCACHE_BASEDIR: ${{ github.workspace }} + CCACHE_NOHASHDIR: true + CCACHE_DIR: ${{ github.workspace }}/_cache/ccache + BOOST_URL: 'https://dl.bintray.com/boostorg/release/1.75.0/source/boost_1_75_0.tar.bz2' + # Alternate dowload location + BOOST_URL2: 'https://downloads.sourceforge.net/project/boost/boost/1.75.0/boost_1_75_0.tar.bz2?r=&ts=1594393912&use_mirror=newcontinuum' + BOOST_ROOT: ${{ github.workspace }}/boost_1_75_0 + # Travis downloader doesn't seem to have updated certs. Using this option + # introduces obvious security risks, but they're Travis's risks. + # Note that this option is only used if the "normal" build fails. + #BOOST_WGET_OPTIONS: '--no-check-certificate' + # Disable warnings as errors temporarily + # CMAKE_ARGS: "-G Ninja -Dwerr=ON -Dwextra=ON -DBoost_ARCHITECTURE=-x64 -DBoost_NO_SYSTEM_PATHS=ON -DCMAKE_VERBOSE_MAKEFILE=ON" + CMAKE_ARGS: "-G Ninja -DBoost_ARCHITECTURE=-x64 -DBoost_NO_SYSTEM_PATHS=ON -DCMAKE_VERBOSE_MAKEFILE=ON" + homebrew_packages: 'protobuf grpc pkg-config bash ninja cmake wget zstd libarchive openssl@1.1 ccache' + srcdir: ${{ github.workspace }}/rippled + builddir: ${{ github.workspace }}/rippled/build/macos + # This can't be used in the if: clauses + verbose: ${{ contains(github.event.head_commit.message, 'ci_verbose') }} + +jobs: + macos_cache: + name: Seed MacOS dependencies + runs-on: "macos-latest" + timeout-minutes: 720 + if: ${{ !contains(github.event.head_commit.message, 'ci_run_') || contains(github.event.head_commit.message, 'ci_run_mac') }} + outputs: + cmakeconfig: ${{ steps.cachekeys.outputs.cmakeconfig }} + headers: ${{ steps.cachekeys.outputs.headers }} + source: ${{ steps.cachekeys.outputs.source }} + instructions: ${{ steps.cachekeys.outputs.instructions }} + brewinfo: ${{ steps.brew.outputs.brewinfo }} + CC: ${{ steps.pre.outputs.CC }} + CXX: ${{ steps.pre.outputs.CXX }} + NUM_PROCESSORS: ${{ steps.pre.outputs.NUM_PROCESSORS }} + steps: + - name: Cancel Previous Runs + # https://github.com/styfle/cancel-workflow-action/tree/0.9.0 + # Note this will not cancel jobs triggered by a pull request event + # when the PR is created from a fork, even if for the same PR. + # It works fine within the same fork, and is intended to allow + # multiple pushes during development without ending up with a + # huge backlog of jobs. 
+ if: ${{ !contains(github.event.head_commit.message, 'ci_no_cancel') }} + uses: styfle/cancel-workflow-action@0.9.0 + with: + access_token: ${{ github.token }} + + - name: Checkout + uses: actions/checkout@v2 + with: + path: rippled + + - name: Build cache keys + id: cachekeys + shell: bash + run: | + echo "::set-output name=cmakeconfig::${{ hashfiles( + 'rippled/CMakeFiles.txt', 'rippled/Builds/CMake/**', + 'rippled/.github/**/cache-invalidate.txt') }}" + echo "::set-output name=headers::${{ hashfiles('rippled/src/**/*.h', + 'rippled/src/**/*.hpp', 'rippled/src/**/*.ipp') }}" + echo "::set-output name=source::${{ hashfiles('rippled/src/**', + '!rippled/src/**/*.md', '!rippled/src/**/*.uml', + '!rippled/src/**/*.png', '!rippled/src/**/*.php') }}" + echo "::set-output name=instructions::${{ hashfiles( + 'rippled/**/macos.yml') }}" + + - name: Install Homebrew packages + run: | + echo Upgrade all brew packages + brew upgrade + for pkg in ${homebrew_packages} + do + [[ "${verbose}" == "true" ]] && echo INSTALLING ${pkg} || true + brew install ${pkg} + done + [[ "${verbose}" == "true" ]] && set -x || true + echo Currently installed brews: + brew list + brew info --all --json > brew.info + + - name: Hash Homebrew info + id: brew + run: | + echo "::set-output name=brewinfo::${{ hashfiles('brew.info') }}" + + - name: Boost cache + id: boostcache + continue-on-error: true + uses: actions/cache@v2 + with: + path: ${{ env.BOOST_ROOT }} + key: ${{ runner.os }}-boost-${{ env.BOOST_ROOT }} + + - name: Source cache + id: srccache + continue-on-error: true + uses: actions/cache@v2 + with: + path: ${{ env.NIH_SRC_PATH}} + key: ${{ runner.os }}-nihsrc-${{ steps.cachekeys.outputs.cmakeconfig }}-${{ steps.cachekeys.outputs.instructions }} + restore-keys: | + ${{ runner.os }}-nihsrc-${{ steps.cachekeys.outputs.cmakeconfig }}- + + - name: Dump environment + run: | + env + set | grep = + + - name: Disk space before + shell: bash + run: | + df -h + find ${CACHE_DIR} -maxdepth 2 \( -iname src -prune -o \ + -type d -exec du -sh {} \; \) || true + + - name: Pre-install + id: pre + shell: bash + run: | + export NUM_PROCESSORS=${NUM_PROCESSORS:-$(nproc || sysctl -n hw.logicalcpu || echo 3)} + echo "NUM PROC is ${NUM_PROCESSORS}" + mkdir -pv "${CACHE_DIR}" + echo "::set-output name=NUM_PROCESSORS::${NUM_PROCESSORS}" + echo "::set-output name=CC::$(brew --prefix llvm)/bin/clang" + echo "::set-output name=CXX::$(brew --prefix llvm)/bin/clang++" + + - name: Build boost + if: steps.boostcache.outputs.cache-hit != 'true' + shell: bash + env: + CC: ${{ steps.pre.outputs.CC }} + CXX: ${{ steps.pre.outputs.CXX }} + NUM_PROCESSORS: ${{ steps.pre.outputs.NUM_PROCESSORS }} + run: | + [[ "${verbose}" == "true" ]] && set -x || true + $CC --version + $CXX --version + export OPENSSL_ROOT=$(brew --prefix openssl@1.1) + cd rippled + ./Builds/containers/shared/install_boost.sh + # brew uninstall --ignore-dependencies boost + + - name: Download CMake dependencies + if: steps.srccache.outputs.cache-hit != 'true' + shell: bash + env: + CC: ${{ steps.pre.outputs.CC }} + CXX: ${{ steps.pre.outputs.CXX }} + NUM_PROCESSORS: ${{ steps.pre.outputs.NUM_PROCESSORS }} + run: | + cmake --version + ${CC} --version + ${CXX} --version + + export BLD_CONFIG=Debug + export OPENSSL_ROOT=$(brew --prefix openssl@1.1) + + ${CC} --version + ${CXX} --version + dir="${builddir}/$( basename ${CXX} ).${BLD_CONFIG}" + mkdir "-pv" "${dir}" + pushd "${dir}" + # Verbose or not, it's good to see the right params + # are being used + set "-x" + # rm is called if the 
first cmake fails, because the most + # likely cause is cache mismatch or corruption. If that happens + # reset the cache and try again. + cmake -DCMAKE_BUILD_TYPE=${BLD_CONFIG} \ + ${CMAKE_ARGS} ${srcdir} || \ + ( rm -rf ${{ env.CACHE_DIR }} ${{ env.NIH_SRC_PATH }}; \ + cmake -DCMAKE_BUILD_TYPE=${BLD_CONFIG} \ + ${CMAKE_ARGS} ${srcdir} ) + + alldeps=( $( \ + cmake --build . --target help | \ + grep phony | cut -d: -f1 | \ + sed 's/^\/.*\/rippled\/.*\/CMakeFiles\///' | \ + grep -v -e '\/' -e '^all' -e install -e clean \ + -e depend -e cache -e docs -e rippled -e xrpl_core \ + -e container -e '^rpm' -e '^dpkg' \ + ) ) + echo "Building dependencies: ${alldeps[@]}" + for dep in help "${alldeps[@]}" + do + echo Building $dep + cmake --build . --target $dep --parallel + done + set +x + + popd + + - name: Disk space after + shell: bash + if: always() + run: | + df -h + du -sh ${CACHE_DIR} || true + du -sh ${CCACHE_DIR} || true + find ${NIH_SRC_PATH} ${NIH_CACHE_ROOT} -maxdepth 2 \ + -type d -exec du -sh {} \; || true + + - name: Debug output + if: steps.cache.outputs.cache-hit != 'true' || failure() + shell: bash + continue-on-error: true + run: | + time df -h + dir=rippled/build + if [ -e ${dir} ] ; then find ${dir} \( -name CMakeOutput.log -o \ + -name CMakeError.log \) -ls -exec cat {} \; ; fi + + - name: Debug output artifacts + if: steps.cache.outputs.cache-hit != 'true' || failure() + uses: actions/upload-artifact@v2 + continue-on-error: true + with: + name: debug-output NIH dependencies + retention-days: 7 + if-no-files-found: ignore + path: | + rippled/build/**/CMakeOutput.log + rippled/build/**/CMakeError.log + + macos: + # These two matrix jobs could be split into separate jobs (debug + # and release), so the test runner job only needs to depend on + # the debug, but it doesn't seem worth the duplication at this + # time, since it's only one extra job (as opposed to a dozen), + # and the jobs appear to be pretty reliable so far. + name: MacOS ${{ matrix.build.BLD_CONFIG }} + needs: macos_cache + runs-on: "macos-latest" + timeout-minutes: 720 + strategy: + fail-fast: false + matrix: + build: + - BLD_CONFIG: "Debug" + + - BLD_CONFIG: "Release" + # Build Release with asserts + CMAKE_ADD: "-Dassert=ON" + + env: + BLD_CONFIG: ${{ matrix.build.BLD_CONFIG }} + CMAKE_ADD: ${{ matrix.build.CMAKE_ADD }} + CC: ${{ needs.macos_cache.outputs.CC }} + CXX: ${{ needs.macos_cache.outputs.CXX }} + NUM_PROCESSORS: ${{ needs.macos_cache.outputs.NUM_PROCESSORS }} + steps: + - name: Checkout + uses: actions/checkout@v2 + with: + path: rippled + + - name: Install Homebrew packages + run: | + [[ "${verbose}" == "true" ]] && set -x || true + echo Upgrade all brew packages + brew upgrade + for pkg in ${homebrew_packages} + do + [[ "${verbose}" == "true" ]] && echo INSTALLING ${pkg} || true + brew install ${pkg} + done + + - name: Boost cache + continue-on-error: true + uses: actions/cache@v2 + with: + path: ${{ env.BOOST_ROOT }} + key: ${{ runner.os }}-boost-${{ env.BOOST_ROOT }} + + - name: Build cache + id: nihcache + continue-on-error: true + uses: actions/cache@v2 + # The cache key is built on the following in order + # 1) OS. + # 2) The hash of all the CMake config files. + # 3) The build type + # 4) The hash of all the header files under the source dir. + # 5) The hash of all the code files under the source dir. 
+ # 6) The hash of the files controlling this job + # When restoring, if the matching cache is not found, search for a usable + # cache by chopping pieces off the end of the cache key until a match is + # found, with a special case for the NIH built in the previous job + with: + path: ${{ env.CACHE_DIR }} + key: ${{ runner.os }}-${{ needs.macos_cache.outputs.cmakeconfig }}-${{ env.BLD_CONFIG }}-${{ needs.macos_cache.outputs.headers }}-${{ needs.macos_cache.outputs.source }}-${{ needs.macos_cache.outputs.instructions }} + restore-keys: | + ${{ runner.os }}-${{ needs.macos_cache.outputs.cmakeconfig }}-${{ env.BLD_CONFIG }}-${{ needs.macos_cache.outputs.headers }}-${{ needs.macos_cache.outputs.source }} + ${{ runner.os }}-${{ needs.macos_cache.outputs.cmakeconfig }}-${{ env.BLD_CONFIG }}-${{ needs.macos_cache.outputs.headers }} + ${{ runner.os }}-${{ needs.macos_cache.outputs.cmakeconfig }}-${{ env.BLD_CONFIG }} + + - name: Source cache + id: srccache + continue-on-error: true + uses: actions/cache@v2 + with: + path: ${{ env.NIH_SRC_PATH}} + key: ${{ runner.os }}-nihsrc-${{ needs.macos_cache.outputs.cmakeconfig }}-${{ needs.macos_cache.outputs.instructions }} + + - name: Dump environment + run: | + env + set | grep = + + - name: Reset ccache stats + shell: bash + run: | + ccache -s + # Reset the stats so only this build counts + ccache -z + + - name: Disk space before + shell: bash + run: | + df -h + du -sh ${CACHE_DIR} || true + find ${NIH_SRC_PATH} ${NIH_CACHE_ROOT} -maxdepth 2 \ + -type d -exec du -sh {} \; || true + + - name: Build + shell: bash + run: | + [[ "${verbose}" == "true" ]] && set -x || true + $CC --version + $CXX --version + export OPENSSL_ROOT=$(brew --prefix openssl@1.1) + if [ "${CMAKE_ADD}" != "" ] ; then + export CMAKE_ARGS+=" ${CMAKE_ADD}" + fi + if [[ ${USE_CCACHE:-} == true ]] && type -a ccache; then + echo "using ccache with basedir [${CCACHE_BASEDIR:-}]" + CMAKE_ARGS+=" -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache" + fi + echo ${CMAKE_ARGS} + mkdir -p $builddir && cd $builddir + cmake ${CMAKE_ARGS} -DCMAKE_BUILD_TYPE=${BLD_CONFIG} ${srcdir} + cmake --build . 
--parallel --verbose + + - name: Test + shell: bash + run: | + [[ "${verbose}" == "true" ]] && set -x || true + # Always run single threaded + ${builddir}/rippled --unittest + + - name: Disk space after + shell: bash + if: always() + run: | + df -h + du -sh ${CACHE_DIR} || true + find ${NIH_SRC_PATH} ${NIH_CACHE_ROOT} -maxdepth 2 \ + -type d -exec du -sh {} \; || true + + - name: Post-run ccache stats + shell: bash + run: ccache -s + + - name: Debug output artifacts + if: always() + uses: actions/upload-artifact@v2 + continue-on-error: true + with: + name: debug-output ${{ env.BLD_CONFIG }} + retention-days: 7 + if-no-files-found: ignore + path: | + rippled/build/**/CMakeOutput.log + rippled/build/**/CMakeError.log + + - name: rippled executable artifact + if: always() + uses: actions/upload-artifact@v2 + continue-on-error: true + with: + name: rippled ${{ env.BLD_CONFIG }} + retention-days: 7 + if-no-files-found: ignore + path: | + rippled/build/**/rippled + !rippled/build/**/_INSTALLED_/** + + macos_runner: + # Can't access env in the name, so hard code + name: MacOS IPv6 test runner Debug + needs: [ macos_cache, macos ] + runs-on: "macos-latest" + timeout-minutes: 720 + strategy: + fail-fast: false + env: + name: "IPv6 tests" + BLD_CONFIG: "Debug" + TEST_EXTRA_ARGS: "--unittest-ipv6" + NUM_PROCESSORS: ${{ needs.macos_cache.outputs.NUM_PROCESSORS }} + steps: + - name: Download rippled artifact + uses: actions/download-artifact@v2 + with: + name: rippled ${{ env.BLD_CONFIG }} + + - name: Install Homebrew packages + run: | + [[ "${verbose}" == "true" ]] && set -x || true + echo Upgrade all brew packages + brew upgrade + for pkg in ${homebrew_packages} + do + [[ "${verbose}" == "true" ]] && echo INSTALLING ${pkg} || true + brew install ${pkg} + done + echo Currently installed brews: + brew list + brew info --all --json > brew.info + + - name: Run IPv6 tests + shell: bash + run: | + [[ "${verbose}" == "true" ]] && set -x || true + + : ${APP_ARGS:=} + # Always run single threaded + APP_ARGS+=" --unittest ${TEST_EXTRA_ARGS}" + + find . -name rippled -ls + # There can be only one + [[ $( find . -name rippled | wc -l ) -eq 1 ]] + APP_PATH="$( find . -name rippled )" + chmod a+rx "${APP_PATH}" + # Verbose or not, it's good to see the right params are being used + echo Run: "${APP_PATH}" ${APP_ARGS} + "${APP_PATH}" ${APP_ARGS} diff --git a/.github/workflows/windows2.yml.hold b/.github/workflows/windows2.yml.hold new file mode 100644 index 00000000000..8e1ebb20791 --- /dev/null +++ b/.github/workflows/windows2.yml.hold @@ -0,0 +1,498 @@ +# This workflow is out of date, but has useful information that should +# eventually be integrated into the "real" workflow. Leave this file as +# a placeholder until that is done. 
+# +name: "Windows build" +on: [push, pull_request] + +env: + CMAKE_EXTRA_ARGS: "-Dwerr=ON -Dwextra=ON" + NINJA_BUILD: true + CACHE_DIR: ${{ github.workspace }}\_cache + NIH_CACHE_ROOT: ${{ github.workspace }}\_cache\nih_c + NIH_SRC_PATH: ${{ github.workspace }}/_srccache + PARALLEL_TESTS: true + BOOST_TOOLSET: msvc-14.1 + BOOST_URL: 'https://dl.bintray.com/boostorg/release/1.75.0/source/boost_1_75_0.tar.bz2' + # Alternate download location + BOOST_URL2: 'https://downloads.sourceforge.net/project/boost/boost/1.75.0/boost_1_75_0.tar.bz2?r=&ts=1594393912&use_mirror=newcontinuum' + BOOST_CACHE_DIR: ${{ github.workspace }}\_boost_cache + BOOST_ROOT: C:\lib\boost_1_75_0 + CMAKE_BOOST: | + -DCMAKE_PREFIX_PATH=${BOOST_ROOT}\\_INSTALLED_ + -DBOOST_ROOT=${BOOST_ROOT}\\_INSTALLED_ + -DBoost_ROOT=${BOOST_ROOT}\\_INSTALLED_ + -DBoost_DIR=${BOOST_ROOT}\\_INSTALLED_\\lib\\cmake\\Boost-1.75.0 + -DBoost_COMPILER=vc141 + -DCMAKE_VERBOSE_MAKEFILE=ON + -DCMAKE_TOOLCHAIN_FILE=${VCPKG_DIR}\\scripts\\buildsystems\\vcpkg.cmake + -DVCPKG_TARGET_TRIPLET=x64-windows-static + VCPKG_DIR: '/c/vcpkg' + VCPKG_CACHE_DIR: ${{ github.workspace }}\_vcpkg_cache + VCPKG_DEFAULT_TRIPLET: "x64-windows-static" + CC: cl.exe + CXX: cl.exe + srcdir: ${{ github.workspace }}\rippled + builddir: ${{ github.workspace }}\rippled\build\ms + VCVersion: 'VC2017' + # 2017 compiler is version 14.16 + # 2019 compiler is version 14.2x + VC2017: '-vcvars_ver=14.16' + VC2019: '-vcvars_ver=14.29' + verbose: ${{ contains(github.event.head_commit.message, 'ci_verbose') }} + +jobs: + windows_cache: + name: Seed windows cache + runs-on: windows-runner + timeout-minutes: 720 + if: ${{ !contains(github.event.head_commit.message, 'ci_run_') || contains(github.event.head_commit.message, 'ci_run_win') }} + outputs: + cmakeconfig: ${{ steps.cachekeys.outputs.cmakeconfig }} + headers: ${{ steps.cachekeys.outputs.headers }} + source: ${{ steps.cachekeys.outputs.source }} + instructions: ${{ steps.cachekeys.outputs.instructions }} + vcpkgver: ${{ steps.cachekeys.outputs.vcpkgver }} + NUM_PROCESSORS: ${{ steps.cachekeys.outputs.NUM_PROCESSORS }} + steps: + - name: Cancel Previous Runs + # https://github.com/styfle/cancel-workflow-action/tree/0.9.0 + # Note this will not cancel jobs triggered by a pull request event + # when the PR is created from a fork, even if for the same PR. + # It works fine within the same fork, and is intended to allow + # multiple pushes during development without ending up with a + # huge backlog of jobs. 
+ if: ${{ !contains(github.event.head_commit.message, 'ci_no_cancel') }} + uses: styfle/cancel-workflow-action@0.9.0 + with: + access_token: ${{ github.token }} + + - name: Checkout + uses: actions/checkout@v2 + with: + path: rippled + + - name: Build parameters + id: cachekeys + shell: bash + run: | + echo "::set-output name=cmakeconfig::${{ hashfiles( + 'rippled/CMakeFiles.txt', 'rippled/Builds/CMake/**', + 'rippled/.github/**/cache-invalidate.txt') }}" + echo "::set-output name=headers::${{ hashfiles('rippled/src/**/*.h', + 'rippled/src/**/*.hpp', 'rippled/src/**/*.ipp') }}" + echo "::set-output name=source::${{ hashfiles('rippled/src/**', + '!rippled/src/**/*.md', '!rippled/src/**/*.uml', + '!rippled/src/**/*.png', '!rippled/src/**/*.php') }}" + echo "::set-output name=instructions::${{ hashfiles( + 'rippled/**/windows.yml') }}" + vcpkgver=$( vcpkg version ) + echo "vcpkg version is ${vcpkgver}" + echo "::set-output name=vcpkgver::${vcpkgver}" + export NUM_PROCESSORS=${NUM_PROCESSORS:-$(nproc)} + echo "NUM PROC is ${NUM_PROCESSORS}" + echo "::set-output name=NUM_PROCESSORS::${NUM_PROCESSORS}" + + - name: Source cache + id: srccache + continue-on-error: true + uses: actions/cache@v2 + with: + path: ${{ env.NIH_SRC_PATH}} + key: ${{ runner.os }}-nihsrc-${{ steps.cachekeys.outputs.cmakeconfig }}-${{ steps.cachekeys.outputs.instructions }} + restore-keys: | + ${{ runner.os }}-nihsrc-${{ steps.cachekeys.outputs.cmakeconfig }}- + + - name: Vcpkg cache + id: vcpkgcache + uses: actions/cache@v2 + with: + path: ${{ env.VCPKG_CACHE_DIR }} + key: ${{ runner.os }}-vcpkg-${{ steps.cachekeys.outputs.vcpkgver }}-${{ steps.cachekeys.outputs.instructions }} + restore-keys: | + ${{ runner.os }}-vcpkg-${{ steps.cachekeys.outputs.vcpkgver }}- + ${{ runner.os }}-vcpkg- + + - name: Boost parameters + id: boostkeys + shell: bash + run: | + boost_file_win="${BOOST_CACHE_DIR}\boost.tar.bz2" + boost_file=$( cygpath --unix "${boost_file_win}" ) + download_boost_file=$( [[ -f ${boost_file} ]] && echo false || echo true ) + echo "boost_file_win is \"${boost_file_win}\"" + echo "boost_file is \"${boost_file}\"" + echo "download_boost_file is \"${download_boost_file}\"" + echo "::set-output name=boost_file_win::${boost_file_win}" + echo "::set-output name=boost_file::${boost_file}" + echo "::set-output name=download_boost_file::${download_boost_file}" + + - name: Dump environment + shell: bash + run: | + env + set | grep = + + [[ "${verbose}" == "true" ]] && \ + time find /c/Program\ Files\ \(x86\)/ -iname 'cl.exe' || true + + - name: Setup cache folders + shell: bash + env: + NUM_PROCESSORS: ${{ steps.cachekeys.outputs.NUM_PROCESSORS }} + run: | + mkdir -pv ${CACHE_DIR} ${NIH_CACHE_ROOT} ${BOOST_CACHE_DIR} \ + ${VCPKG_CACHE_DIR} ${NIH_SRC_PATH} + + - name: Initialize vcpkg cache + if: steps.vcpkgcache.outputs.cache-hit != 'true' + shell: bash + env: + NUM_PROCESSORS: ${{ steps.cachekeys.outputs.NUM_PROCESSORS }} + run: | + [[ "${verbose}" == "true" ]] && set -x || true + + find ${VCPKG_DIR} -maxdepth 1 -type d -exec du -sh {} \; || true + + if [[ -d ${VCPKG_CACHE_DIR} ]] + then + if pushd ${VCPKG_CACHE_DIR} + then + cp -au . ${VCPKG_DIR} || true + popd + fi + rm -rf ${VCPKG_CACHE_DIR} + fi + + cd ${srcdir} + . 
+          if [[ "${verbose}" == "true" ]] ; then
+            type -a ninja || true
+            type -a cl.exe || true
+            type -a vcpkg
+            ls -l ${VCPKG_DIR}
+            du -sh ${VCPKG_DIR}
+            vcpkg help || true
+            vcpkg version || true
+          fi
+          mkdir -pv ${VCPKG_DIR}/installed
+
+          # Force vcpkg to build with VS2017
+          ls -l ${VCPKG_DIR}/triplets
+          tripletfile=${VCPKG_DIR}/triplets/${VCPKG_DEFAULT_TRIPLET}.cmake
+          if [[ -e ${tripletfile} ]] && ! grep -q "v141" ${tripletfile}
+          then
+            # https://otland.net/threads/problem-cxxframehandler4-gshandlercheck_eh4-on-compiling-tfs-1-3.267884/
+            # https://github.com/Microsoft/vcpkg/issues/766
+            cat ${tripletfile}
+            echo 'set(VCPKG_PLATFORM_TOOLSET "v141")' | \
+              tee -a ${tripletfile}
+          fi
+
+          df -h
+          bin/sh/install-vcpkg.sh openssl
+          bin/sh/install-vcpkg.sh libarchive[lz4]
+          bin/sh/install-vcpkg.sh grpc
+
+          # TBD: consider rocksdb via vcpkg if/when we can build with the
+          # vcpkg version
+          # bin/sh/install-vcpkg.sh rocksdb[snappy,lz4,zlib]
+
+          mkdir -pv ${VCPKG_CACHE_DIR}
+          cp -a ${VCPKG_DIR}/{buildtrees,installed,packages} \
+            ${VCPKG_CACHE_DIR}
+
+      - name: Build Boost
+        if: steps.boostcache.outputs.cache-hit != 'true'
+        shell: bash
+        env:
+          NUM_PROCESSORS: ${{ steps.cachekeys.outputs.NUM_PROCESSORS }}
+          BOOST_FILE: ${{ steps.boostkeys.outputs.boost_file }}
+        run: |
+          [[ "${verbose}" == "true" ]] && set -x || true
+
+          if [[ "${verbose}" == "true" ]] ; then
+            ls -l ${BOOST_FILE} || true
+            ls -l ${BOOST_CACHE_DIR} || true
+          fi
+
+          cd ${srcdir}
+
+          ./Builds/containers/shared/install_boost.sh ${!VCVersion}
+
+      - name: Download CMake dependencies
+        if: steps.srccache.outputs.cache-hit != 'true'
+        shell: bash
+        env:
+          NUM_PROCESSORS: ${{ steps.cachekeys.outputs.NUM_PROCESSORS }}
+        run: |
+          if [[ "${verbose}" == "true" ]] ; then
+            set -x
+            choco list | wc
+          fi
+          [[ "${verbose}" == "true" ]] && type -a ninja || true
+
+          # Download the NIH items
+          if [ "${CMAKE_BOOST}" != "" ] ; then
+            export CMAKE_EXTRA_ARGS="${CMAKE_EXTRA_ARGS} $( eval echo ${CMAKE_BOOST} )"
+          fi
+          if [ "${CMAKE_ADD}" != "" ] ; then
+            export CMAKE_EXTRA_ARGS="${CMAKE_EXTRA_ARGS} $( eval echo ${CMAKE_ADD} )"
+          fi
+
+          cmake --version
+
+          export BUILD_TYPE=Debug
+
+          mkdir -pv "${builddir}"
+          pushd "${builddir}"
+          # Verbose or not, it's good to see that the right params
+          # are being used
+          set -x
+          # rm is called if the first cmake fails, because the most
+          # likely cause is cache mismatch or corruption. If that happens,
+          # reset the cache and try again.
+          cmake -G Ninja -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
+            ${CMAKE_EXTRA_ARGS} ${srcdir} || \
+            ( rm -rf ${{ env.CACHE_DIR }} ${{ env.NIH_SRC_PATH }}; \
+              cmake -G Ninja -DCMAKE_BUILD_TYPE=${BUILD_TYPE} \
+                ${CMAKE_EXTRA_ARGS} ${srcdir} )
+
+          alldeps=( $( \
+            cmake --build . --target help | \
+              grep phony | sed 's/[A-Z]:\/.*\/CMakeFiles\///' | \
+              sed 's/: phony//' | \
+              grep -v -e '\/' -e '^all' -e install -e clean \
+                -e depend -e cache -e docs -e rippled \
+                -e xrpl_core -e container -e '^rpm' -e '^dpkg' \
+          ) )
+          echo "Building dependencies to force downloads: ${alldeps[@]}"
+
+          for dep in help "${alldeps[@]}"
+          do
+            echo Building $dep
+            cmake --build . --target $dep --parallel
+          done
+          set +x
+
+          if [[ "${verbose}" == "true" ]] ; then
+            ls -l || true
+            grep vcpkg CMakeCache.txt || true
+            grep _cache CMakeCache.txt || true
+          fi
+
+          popd
+
+      - name: Cache sizes
+        shell: bash
+        run: |
+          du -sh ${VCPKG_DIR} || true
+          du -sh ${VCPKG_CACHE_DIR} || true
+          du -sh ${CACHE_DIR} || true
+          cd ${CACHE_DIR}
+          du -sh * || true
+          find ${NIH_SRC_PATH} ${NIH_CACHE_ROOT} -maxdepth 2 \
+            -type d -exec du -sh {} \; || true
+
+      - name: Debug output
+        if: steps.srccache.outputs.cache-hit != 'true'
+        shell: bash
+        continue-on-error: true
+        run: |
+          time df -h
+          if [ -e build ] ; then find build \( -name CMakeOutput.log -o -name CMakeError.log \) -ls -exec cat {} \; ; fi
+          [ -e /C/ProgramData/chocolatey/logs/chocolatey.log ] && cat /C/ProgramData/chocolatey/logs/chocolatey.log || true
+
+      - name: Debug output artifacts
+        if: steps.srccache.outputs.cache-hit != 'true'
+        uses: actions/upload-artifact@v2
+        continue-on-error: true
+        with:
+          name: debug-output NIH windows_cache
+          retention-days: 7
+          if-no-files-found: ignore
+          path: |
+            build/**/CMakeOutput.log
+            build/**/CMakeError.log
+            /C/ProgramData/chocolatey/logs/chocolatey.log
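The build job that follows restores its cache with progressively shorter `restore-keys` prefixes, so a stale-but-close cache beats a cold start. A toy model of that matching behavior, not part of the workflow, with invented key fragments:

```bash
#!/usr/bin/env bash
# Toy model of actions/cache restore-keys matching; every key is invented.
existing_keys=(
    "Windows-cfg1-VC2017-Debug-ON-Ninja-hdrOLD-srcOLD-insOLD"
    "Windows-cfg1-VC2017-Release-ON-Ninja-hdrNEW-srcNEW-insNEW"
)
restore_keys=(
    "Windows-cfg1-VC2017-Debug-ON-Ninja-hdrNEW-srcNEW-"
    "Windows-cfg1-VC2017-Debug-ON-Ninja-hdrNEW-"
    "Windows-cfg1-VC2017-Debug-ON-Ninja-"
)
# Try each prefix in order; the first existing key that starts with a
# prefix wins, exactly like chopping pieces off the end of the cache key.
for prefix in "${restore_keys[@]}"
do
    for key in "${existing_keys[@]}"
    do
        if [[ "${key}" == "${prefix}"* ]]
        then
            echo "Restoring cache: ${key}"
            exit 0
        fi
    done
done
echo "No usable cache; starting cold"
```

Here the first two prefixes match nothing (the headers changed), and the third falls back to the old Debug cache rather than building from scratch.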
+  windows_everything_else:
+    name: Windows ${{ matrix.VCVersion }}, ${{ matrix.build.type }}, ${{ matrix.cmake.CMAKE_GENERATOR }} unity=${{ matrix.cmake.unity }}
+    needs: windows_cache
+    runs-on: windows-runner-${{ matrix.VCVersion }}
+    timeout-minutes: 720
+    strategy:
+      fail-fast: false
+      matrix:
+        VCVersion: [ 'VC2017', 'VC2019' ]
+        build:
+          - # Debug builds have some tests that seem to make the Github
+            # hosts run out of resources. Skip those tests here and run
+            # them manually.
+            type: Debug
+            ci_tests: ON
+
+          - type: Release
+            ci_tests: OFF
+        cmake:
+          - CMAKE_GENERATOR: 'Ninja'
+            unity: 'ON'
+
+          - CMAKE_GENERATOR: 'Visual Studio 16 2019'
+            CMAKE_ADD: '-DCMAKE_GENERATOR_TOOLSET=v141,host=x64 -Ax64'
+            DESTDIR: '_installed_'
+            RUNDIR: '_installed_/Program Files/rippled/bin'
+            BUILD_ARGS: '--config ${BUILD_TYPE} --target install'
+            unity: 'OFF'
+
+    env:
+      BUILD_TYPE: ${{ matrix.build.type }}
+      NUM_PROCESSORS: ${{ needs.windows_cache.outputs.NUM_PROCESSORS }}
+      VCVersion: ${{ matrix.VCVersion }}
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+        with:
+          path: ${{ env.srcdir }}
+
+      - name: Build cache
+        continue-on-error: true
+        uses: actions/cache@v2
+        # The cache key is built from the following, in order:
+        # 1) OS.
+        # 2) The hash of all the CMake config files.
+        # 3) The vcvars version.
+        # 4) The build type.
+        # 5) The build unity setting.
+        # 6) The cmake generator.
+        # 7) The hash of all the header files under the source dir.
+        # 8) The hash of all the code files under the source dir.
+        # 9) The hash of the files controlling this job.
+        # When restoring, if the matching cache is not found, search for a
+        # usable cache by chopping pieces off the end of the cache key
+        # until a match is found, with a special case for the NIH built in
+        # the previous job.
+        with:
+          path: ${{ env.CACHE_DIR }}
+          key: ${{ runner.os }}-${{ needs.windows_cache.outputs.cmakeconfig }}-${{ matrix.VCVersion }}-${{ matrix.build.type }}-${{ matrix.cmake.unity }}-${{ matrix.cmake.CMAKE_GENERATOR }}-${{ needs.windows_cache.outputs.headers }}-${{ needs.windows_cache.outputs.source }}-${{ needs.windows_cache.outputs.instructions }}
+          restore-keys: |
+            ${{ runner.os }}-${{ needs.windows_cache.outputs.cmakeconfig }}-${{ matrix.VCVersion }}-${{ matrix.build.type }}-${{ matrix.cmake.unity }}-${{ matrix.cmake.CMAKE_GENERATOR }}-${{ needs.windows_cache.outputs.headers }}-${{ needs.windows_cache.outputs.source }}-
+            ${{ runner.os }}-${{ needs.windows_cache.outputs.cmakeconfig }}-${{ matrix.VCVersion }}-${{ matrix.build.type }}-${{ matrix.cmake.unity }}-${{ matrix.cmake.CMAKE_GENERATOR }}-${{ needs.windows_cache.outputs.headers }}-
+            ${{ runner.os }}-${{ needs.windows_cache.outputs.cmakeconfig }}-${{ matrix.VCVersion }}-${{ matrix.build.type }}-${{ matrix.cmake.unity }}-${{ matrix.cmake.CMAKE_GENERATOR }}-
+
+      - name: Source cache
+        id: cache
+        continue-on-error: true
+        uses: actions/cache@v2
+        with:
+          path: ${{ env.NIH_SRC_PATH }}
+          key: ${{ runner.os }}-nihsrc-${{ needs.windows_cache.outputs.cmakeconfig }}-${{ needs.windows_cache.outputs.instructions }}
+
+      - name: Vcpkg cache
+        id: vcpkgcache
+        uses: actions/cache@v2
+        with:
+          path: ${{ env.VCPKG_CACHE_DIR }}
+          key: ${{ runner.os }}-vcpkg-${{ needs.windows_cache.outputs.vcpkgver }}-${{ needs.windows_cache.outputs.instructions }}
+
+      - name: Dump environment
+        shell: bash
+        run: |
+          env
+          set | grep =
+
+      - name: Build
+        shell: bash
+        run: |
+          [[ "${verbose}" == "true" ]] && set -x || true
+          df -h
+          . ${srcdir}/bin/sh/setup-msvc.sh ${!VCVersion}
+          [[ "${verbose}" == "true" ]] && type -a ninja || true
+          mkdir -p $builddir && cd $builddir
+          export CMAKE_EXTRA_ARGS+=" -DCMAKE_BUILD_TYPE=${BUILD_TYPE}"
+          export CMAKE_EXTRA_ARGS+=" -Dunity=${{ matrix.cmake.unity }}"
+          export CMAKE_EXTRA_ARGS+=" -Dci_tests=${{ matrix.build.ci_tests }}"
+          if [ "${CMAKE_BOOST}" != "" ] ; then
+            export CMAKE_EXTRA_ARGS+=" $( eval echo ${CMAKE_BOOST} )"
+          fi
+          if [ "${{ matrix.cmake.CMAKE_ADD }}" != "" ] ; then
+            export CMAKE_EXTRA_ARGS+=" $( eval \
+              echo ${{ matrix.cmake.CMAKE_ADD }} )"
+          fi
+          if [ "${{ matrix.cmake.DESTDIR }}" != "" ] ; then
+            export DESTDIR="${builddir}/${{ matrix.cmake.DESTDIR }}"
+          fi
+          if [ "${{ matrix.cmake.BUILD_ARGS }}" != "" ] ; then
+            export BUILD_ARGS="$( eval echo ${{ matrix.cmake.BUILD_ARGS }} )"
+          fi
+          cmake -G "${{ matrix.cmake.CMAKE_GENERATOR }}" \
+            ${CMAKE_EXTRA_ARGS} ${srcdir}
+          cmake --build . --parallel --verbose ${BUILD_ARGS}
+
+      - name: Test
+        shell: bash
+        run: |
+          [[ "${verbose}" == "true" ]] && set -x || true
+          export RUNDIR="${builddir}"
+          if [ "${{ matrix.cmake.RUNDIR }}" != "" ] ; then
+            export RUNDIR+="/${{ matrix.cmake.RUNDIR }}"
+          fi
+          # Override num procs to force fewer unit test jobs
+          if [[ "${{ matrix.test_jobs }}" != "" ]]
+          then
+            export NUM_PROCESSORS="${{ matrix.test_jobs }}"
+            echo Override NUM_PROCESSORS to ${NUM_PROCESSORS}
+          fi
+          "${RUNDIR}/rippled.exe" --version
+
+          # No need to save space. Always include the full test output,
+          # and the test command lines.
+          set -x
+          if [[ "${{ matrix.build.ci_tests }}" == "ON" ]]
+          then
+            "${RUNDIR}/rippled.exe" --unittest=Check --unittest-log \
+              --unittest-jobs ${NUM_PROCESSORS}
+          fi
+          "${RUNDIR}/rippled.exe" --unittest --unittest-log \
+            --unittest-jobs ${NUM_PROCESSORS}
+
+      - name: Unity files
+        if: failure()
+        shell: bash
+        # Unity builds sometimes cause OOM failures.
+        # When that happens, this may help diagnose which source file
+        # is the problem.
+        run: |
+          find ${builddir} -iname 'unity_*.cxx' -ls -exec cat {} \;
+
+      - name: Debug output artifacts
+        if: ${{ always() }}
+        uses: actions/upload-artifact@v2
+        continue-on-error: true
+        with:
+          name: debug-output ${{ matrix.build.type }} ${{ matrix.VCVersion }} unity=${{ matrix.cmake.unity }} ${{ matrix.cmake.CMAKE_GENERATOR }}
+          retention-days: 7
+          if-no-files-found: ignore
+          path: |
+            rippled/build/**/CMakeOutput.log
+            rippled/build/**/CMakeError.log
+            /C/ProgramData/chocolatey/logs/chocolatey.log
+
+      - name: rippled executable artifact
+        uses: actions/upload-artifact@v2
+        if: ${{ always() }}
+        continue-on-error: true
+        with:
+          name: rippled ${{ matrix.build.type }} ${{ matrix.VCVersion }} unity=${{ matrix.cmake.unity }} ${{ matrix.cmake.CMAKE_GENERATOR }}
+          retention-days: 7
+          if-no-files-found: ignore
+          path: |
+            rippled/build/**/rippled.exe
+            rippled/build/**/core*
+            !rippled/build/**/rippled.dir*
+            !**/_INSTALLED_/**
+            !**/_installed_/**
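Both jobs source `setup-msvc.sh` with `${!VCVersion}`, which is bash indirect expansion: `VCVersion` holds the *name* of another env var (`VC2017` or `VC2019`) whose value is the `vcvarsall.bat` argument. A minimal sketch of the mechanism, with values copied from the `env:` block above:

```bash
#!/usr/bin/env bash
# Indirect expansion as used by ". ./bin/sh/setup-msvc.sh ${!VCVersion}".
VC2017='-vcvars_ver=14.16'
VC2019='-vcvars_ver=14.29'
VCVersion='VC2017'

# ${!VCVersion} expands the variable whose name is the value of VCVersion.
echo "vcvarsall.bat will receive: ${!VCVersion}"   # -vcvars_ver=14.16
```

This lets the matrix select a compiler toolset by flipping a single string, rather than repeating the version-specific flag in every step.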
diff --git a/bin/ci/ubuntu/build-and-test.sh b/bin/ci/ubuntu/build-and-test.sh
index 2c1734863fb..029671b10be 100755
--- a/bin/ci/ubuntu/build-and-test.sh
+++ b/bin/ci/ubuntu/build-and-test.sh
@@ -21,7 +21,8 @@ echo "BUILD TYPE: ${BUILD_TYPE}"
 echo "BUILD TARGET: ${TARGET}"
 
 JOBS=${NUM_PROCESSORS:-2}
-if [[ ${TRAVIS:-false} != "true" ]]; then
+if [[ ${TRAVIS:-false} != "true" && ${GITHUB_ACTIONS:-false} != "true" ]]
+then
   JOBS=$((JOBS+1))
 fi
 
@@ -63,7 +64,7 @@ if version_ge $CMAKE_VER "3.12.0" ; then
   BUILDARGS+=" --parallel"
 fi
 
-if [[ ${NINJA_BUILD:-} == false ]]; then
+if [[ ${NINJA_BUILD:-} != true ]]; then
   if version_ge $CMAKE_VER "3.12.0" ; then
     BUILDARGS+=" ${JOBS}"
   else
@@ -76,7 +77,7 @@ if [[ ${VERBOSE_BUILD:-} == true ]]; then
   if version_ge $CMAKE_VER "3.14.0" ; then
     BUILDARGS+=" --verbose"
   else
-    if [[ ${NINJA_BUILD:-} == false ]]; then
+    if [[ ${NINJA_BUILD:-} != true ]]; then
       BUILDTOOLARGS+=" verbose=1"
     else
       BUILDTOOLARGS+=" -v"
@@ -84,7 +85,7 @@ if [[ ${VERBOSE_BUILD:-} == true ]]; then
   fi
 fi
 
-if [[ ${USE_CCACHE:-} == true ]]; then
+if [[ ${USE_CCACHE:-} == true ]] && type -a ccache; then
   echo "using ccache with basedir [${CCACHE_BASEDIR:-}]"
   CMAKE_EXTRA_ARGS+=" -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
 fi
@@ -111,20 +112,51 @@ do
 done
 
 # generate
-${time} cmake ../.. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} ${CMAKE_EXTRA_ARGS}
-# Display the cmake output, to help with debugging if something fails
-for file in CMakeOutput.log CMakeError.log
-do
-  if [ -f CMakeFiles/${file} ]
-  then
-    ls -l CMakeFiles/${file}
-    cat CMakeFiles/${file}
-  fi
-done
+if ! ${time} cmake ../.. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} ${CMAKE_EXTRA_ARGS} \
+    && [[ -e "${NIH_CACHE_ROOT}" ]]
+then
+    # Only the *-stamp directories, which track the source location
+    # and git info of the NIH cache, are safe to use across builds.
+    # The build folders, which are more specific, need to be removed
+    # before being cached. That means wasted build effort for one job
+    # (gcc-9, Debug), but the saved source space and download time
+    # across all jobs should make up for it.
+    find ${NIH_CACHE_ROOT} -depth -type d \
+        \( -iname '*-stamp' -printf "Keep %p\n" -prune -o \
+        \( -iname '*-build' -o -name 'tmp' \) -printf "Delete %p\n" \
+            -exec rm -rf {} \; -o \
+        -iname '*-subbuild' -printf "Clean %p\n" \
+            -exec rm -rfv {}/CMakeCache.txt {}/CMakeFiles \
+                {}/cmake_install.cmake {}/CMakeLists.txt \
+                {}/Makefile \; -exec ls {} \; \)
+    ${time} cmake ../.. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} ${CMAKE_EXTRA_ARGS}
+fi
+
+# Display the cmake output, to help with debugging if something fails,
+# unless this is running under a Github action. They have another
+# mechanism to dump the logs.
+if [[ ! -v GITHUB_ACTIONS || "${GITHUB_ACTIONS}" != "true" ]]
+then
+    for file in CMakeOutput.log CMakeError.log
+    do
+        if [ -f CMakeFiles/${file} ]
+        then
+            ls -l CMakeFiles/${file}
+            cat CMakeFiles/${file}
+        fi
+    done
+fi
 
 # build
 export DESTDIR=$(pwd)/_INSTALLED_
 
-${time} eval cmake --build . ${BUILDARGS} -- ${BUILDTOOLARGS}
+if ! ${time} eval cmake --build . ${BUILDARGS} -- ${BUILDTOOLARGS} \
+    && [[ -e "${NIH_CACHE_ROOT}" ]]
+then
+    # Caching isn't perfect
+    rm -rf ${NIH_CACHE_ROOT}
+    ${time} eval cmake --build . ${BUILDARGS} -- ${BUILDTOOLARGS}
+fi
 
 if [[ ${TARGET} == "docs" ]]; then
   ## mimic the standard test output for docs build
@@ -140,6 +172,8 @@ popd
 
 if [[ "${TARGET}" == "validator-keys" ]] ; then
   export APP_PATH="$PWD/build/${BUILD_DIR}/validator-keys/validator-keys"
+elif [[ "${TARGET}" == "rippled-reporting" ]] ; then
+  export APP_PATH="$PWD/build/${BUILD_DIR}/${TARGET}"
 else
   export APP_PATH="$PWD/build/${BUILD_DIR}/rippled"
 fi
@@ -153,43 +187,14 @@ ldd ${APP_PATH}
 if [[ "${TARGET}" == "validator-keys" ]] ; then
   APP_ARGS="--unittest"
 else
-  function join_by { local IFS="$1"; shift; echo "$*"; }
-
-  # This is a list of manual tests
-  # in rippled that we want to run
-  # ORDER matters here...sorted in approximately
-  # descending execution time (longest running tests at top)
-  declare -a manual_tests=(
-    'ripple.ripple_data.reduce_relay_simulate'
-    'ripple.tx.Offer_manual'
-    'ripple.tx.CrossingLimits'
-    'ripple.tx.PlumpBook'
-    'ripple.app.Flow_manual'
-    'ripple.tx.OversizeMeta'
-    'ripple.consensus.DistributedValidators'
-    'ripple.app.NoRippleCheckLimits'
-    'ripple.ripple_data.compression'
-    'ripple.NodeStore.Timing'
-    'ripple.consensus.ByzantineFailureSim'
-    'beast.chrono.abstract_clock'
-    'beast.unit_test.print'
-  )
-  if [[ ${TRAVIS:-false} != "true" ]]; then
-    # these two tests cause travis CI to run out of memory.
-    # TODO: investigate possible workarounds.
-    manual_tests=(
-      'ripple.consensus.ScaleFreeSim'
-      'ripple.tx.FindOversizeCross'
-      "${manual_tests[@]}"
-    )
-  fi
+  manual_tests=$( "$(dirname "$0")"/manual-tests.sh "${APP_PATH}" )
 
   if [[ ${MANUAL_TESTS:-} == true ]]; then
-    APP_ARGS+=" --unittest=$(join_by , "${manual_tests[@]}")"
+    APP_ARGS+=" --unittest=${manual_tests}"
   else
     APP_ARGS+=" --unittest --quiet --unittest-log"
   fi
-  if [[ ${coverage} == false && ${PARALLEL_TESTS:-} == true ]]; then
+  if [[ ${coverage} != true && ${PARALLEL_TESTS:-} == true ]]; then
     APP_ARGS+=" --unittest-jobs ${JOBS}"
   fi
@@ -222,6 +227,17 @@ if [[ ${look_core} == true ]]; then
   before=$(ls -A1 ${coredir})
 fi
 
+if [[ -v MINTESTAVAIL && \
+    $( df . --output=avail | tail -1 ) -lt ${MINTESTAVAIL} ]]
+then
+  echo Removing install dir for space: ${DESTDIR}
+  rm -rf ${DESTDIR}
+fi
+df -h
+du -sh ${CACHE_DIR} || true
+du -sh ${CCACHE_DIR} || true
+find ${NIH_CACHE_ROOT} -maxdepth 2 \( -iname src -prune -o -type d -exec du -sh {} \; \)
+find build -maxdepth 3 \( -iname src -prune -o -type d -exec du -sh {} \; \)
 
 set +e
 echo "Running tests for ${APP_PATH}"
 if [[ ${MANUAL_TESTS:-} == true && ${PARALLEL_TESTS:-} != true ]]; then
@@ -233,12 +249,22 @@ if [[ ${MANUAL_TESTS:-} == true && ${PARALLEL_TESTS:-} != true ]]; then
     fi
   done
 else
-  ${APP_PATH} ${APP_ARGS}
-  TEST_STAT=$?
+  # If tests fail, let them retry up to 2 more times. Capture the exit
+  # status inside the loop, so a final failure is not masked by the
+  # status of the loop itself.
+  retry=2
+  TEST_STAT=0
+  until ${APP_PATH} ${APP_ARGS}
+  do
+    TEST_STAT=$?
+    [[ $retry -le 0 ]] && break
+    retry=$(( retry - 1 ))
+    echo "$retry retries remaining"
+    sleep 5
+  done
 fi
 set -e
 
 if [[ ${look_core} == true ]]; then
+  echo "current path: $(pwd), core dir: ${coredir}"
   after=$(ls -A1 ${coredir})
   oIFS="${IFS}"
   IFS=$'\n\r'
@@ -246,21 +270,24 @@ if [[ ${look_core} == true ]]; then
   for l in $(diff -w --suppress-common-lines <(echo "$before") <(echo "$after")) ; do
     if [[ "$l" =~ ^[[:space:]]*\>[[:space:]]*(.+)$ ]] ; then
       corefile="${BASH_REMATCH[1]}"
-      echo "FOUND core dump file at '${coredir}/${corefile}'"
-      gdb_output=$(/bin/mktemp /tmp/gdb_output_XXXXXXXXXX.txt)
-      found_core=true
-      gdb \
-        -ex "set height 0" \
-        -ex "set logging file ${gdb_output}" \
-        -ex "set logging on" \
-        -ex "print 'ripple::BuildInfo::versionString'" \
-        -ex "thread apply all backtrace full" \
-        -ex "info inferiors" \
-        -ex quit \
-        "$APP_PATH" \
-        "${coredir}/${corefile}" &> /dev/null
-
-      echo -e "CORE INFO: \n\n $(cat ${gdb_output}) \n\n)"
+      if [[ "$( file -b ${coredir}/${corefile} )" =~ "core" ]]
+      then
+        echo "FOUND core dump file at '${coredir}/${corefile}'"
+        gdb_output=$(/bin/mktemp /tmp/gdb_output_XXXXXXXXXX.txt)
+        found_core=true
+        gdb \
+          -ex "set height 0" \
+          -ex "set logging file ${gdb_output}" \
+          -ex "set logging on" \
+          -ex "print 'ripple::BuildInfo::versionString'" \
+          -ex "thread apply all backtrace full" \
+          -ex "info inferiors" \
+          -ex quit \
+          "$APP_PATH" \
+          "${coredir}/${corefile}" &> /dev/null
+
+        echo -e "CORE INFO: \n\n $(cat ${gdb_output}) \n\n"
+      fi
     fi
   done
   IFS="${oIFS}"
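The retry loop just added to `build-and-test.sh` is specific to the test run. The same idea can be factored into a reusable helper; a hypothetical generalization, not part of the patch:

```bash
#!/usr/bin/env bash
# Hypothetical retry helper. It preserves the failing exit status, which
# a bare "until cmd || [[ ... ]]" loop would otherwise lose.
retry() {
    local attempts=$1
    shift
    local status=0
    while true
    do
        "$@" && return 0
        status=$?
        attempts=$(( attempts - 1 ))
        [[ ${attempts} -le 0 ]] && break
        echo "${attempts} attempts remaining" >&2
        sleep 5
    done
    return "${status}"
}

# Example: give the test binary three total attempts.
retry 3 false || echo "failed with status $?"
```

Wrapping the command keeps the call site to one line and makes the status handling testable in isolation.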
diff --git a/bin/ci/ubuntu/manual-tests.sh b/bin/ci/ubuntu/manual-tests.sh
new file mode 100755
index 00000000000..8b36cd1898c
--- /dev/null
+++ b/bin/ci/ubuntu/manual-tests.sh
@@ -0,0 +1,60 @@
+#!/usr/bin/env bash
+set -e
+
+function join_by { local IFS="$1"; shift; echo -n "$*"; }
+
+if [[ $# -gt 0 && -e "${1}" ]]
+then
+    # Get the list of manual tests from rippled, and exclude the
+    # troublemakers.
+    # DetectCrash intentionally fails.
+    exclude="grep -v -e beast.unit_test.DetectCrash"
+    if [[ ${TRAVIS:-false} == "true" || ${GITHUB_ACTIONS:-false} == "true" ]]
+    then
+        exclude+=" -e ripple.consensus.ScaleFreeSim \
+            -e ripple.tx.FindOversizeCross"
+    fi
+    if type -p dos2unix >& /dev/null
+    then
+        dos2unix="dos2unix"
+    else
+        dos2unix="cat"
+    fi
+
+    declare -a manual_tests=( $( "${1}" --unittest=beast.unit_test.print | \
+        grep '|M|' | cut -d' ' -f2 | ${exclude} | ${dos2unix} ) )
+    join_by , "${manual_tests[@]}"
+    exit 0
+fi
+
+# This is a list of manual tests in rippled that we want to run.
+# ORDER matters here... sorted in approximately descending execution
+# time (longest running tests at top).
+declare -a manual_tests=(
+    'ripple.ripple_data.reduce_relay_simulate'
+    'ripple.tx.Offer_manual'
+    'ripple.tx.CrossingLimits'
+    'ripple.tx.PlumpBook'
+    'ripple.app.Flow_manual'
+    'ripple.tx.OversizeMeta'
+    'ripple.consensus.DistributedValidators'
+    'ripple.app.NoRippleCheckLimits'
+    'ripple.ripple_data.compression'
+    'ripple.NodeStore.Timing'
+    'ripple.consensus.ByzantineFailureSim'
+    'beast.chrono.abstract_clock'
+    'beast.unit_test.print'
+)
+if [[ ${TRAVIS:-false} != "true" && ${GITHUB_ACTIONS:-false} != "true" ]]
+then
+    # These two tests cause CI hosts to run out of memory, so only run
+    # them outside of Travis and GitHub Actions.
+    # TODO: investigate possible workarounds.
+    manual_tests=(
+        'ripple.consensus.ScaleFreeSim'
+        'ripple.tx.FindOversizeCross'
+        "${manual_tests[@]}"
+    )
+fi
+
+join_by , "${manual_tests[@]}"
diff --git a/bin/sh/install-vcpkg.sh b/bin/sh/install-vcpkg.sh
index 8cf8f2d0881..f762ed9f20b 100755
--- a/bin/sh/install-vcpkg.sh
+++ b/bin/sh/install-vcpkg.sh
@@ -20,8 +20,16 @@ else
   if [[ -d "${VCPKG_DIR}" ]] ; then
     rm -rf "${VCPKG_DIR}"
   fi
-  git clone --branch 2021.04.30 https://github.com/Microsoft/vcpkg.git ${VCPKG_DIR}
+  gittag=2021.05.12
+  git clone --branch ${gittag} https://github.com/Microsoft/vcpkg.git ${VCPKG_DIR}
   pushd ${VCPKG_DIR}
+  if git tag -l --contains ${gittag} | grep -v ${gittag}
+  then
+    echo ****************************************************** >&2
+    echo WARNING: Found git tags of vcpkg later than ${gittag}. >&2
+    echo Consider updating to get the latest software versions. >&2
+    echo ****************************************************** >&2
+  fi
   BSARGS=()
   if [[ "$(uname)" == "Darwin" ]] ; then
     BSARGS+=(--allowAppleClang)
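The new warning in `install-vcpkg.sh` works because `git tag -l --contains <tag>` lists every tag whose commit contains (descends from) the pinned tag's commit; filtering out the pinned tag itself leaves only newer releases. Run standalone, the check might look like this (the vcpkg clone location is an assumption):

```bash
#!/usr/bin/env bash
# Standalone sketch of the staleness check; VCPKG_DIR is assumed to hold
# an existing vcpkg clone with its tags fetched.
gittag=2021.05.12
newer=$( git -C "${VCPKG_DIR:-/c/vcpkg}" tag -l --contains "${gittag}" \
    | grep -v "${gittag}" )
if [[ -n "${newer}" ]]
then
    echo "WARNING: vcpkg tags newer than ${gittag}:" >&2
    echo "${newer}" >&2
fi
```

The warning is advisory only: the clone stays pinned to the known-good tag, so builds remain reproducible while maintainers get a nudge to bump it.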
diff --git a/bin/sh/setup-msvc.sh b/bin/sh/setup-msvc.sh
index 8d61c9757fa..4a55d38d82e 100755
--- a/bin/sh/setup-msvc.sh
+++ b/bin/sh/setup-msvc.sh
@@ -1,35 +1,52 @@
 # NOTE: must be sourced from a shell so it can export vars
+# Will pass all command line params through to vcvarsall.bat
 
 cat << BATCH > ./getenv.bat
 CALL %*
 ENV
 BATCH
 
+if echo "$-" | grep -q "x"
+then
+  # Don't output commands
+  set +x
+  restorex=1
+fi
 while read line ; do
   IFS='"' read x path arg <<<"${line}"
   if [ -f "${path}" ] ; then
     echo "FOUND: $path"
-    export VCINSTALLDIR=$(./getenv.bat "${path}" ${arg} | grep "^VCINSTALLDIR=" | sed -E "s/^VCINSTALLDIR=//g")
+    vcenv=$( ./getenv.bat "${path}" ${arg} )
+    export VCINSTALLDIR=$( echo "${vcenv}" | grep "^VCINSTALLDIR=" | sed -E "s/^VCINSTALLDIR=//g")
     if [ "${VCINSTALLDIR}" != "" ] ; then
       echo "USING ${VCINSTALLDIR}"
-      export LIB=$(./getenv.bat "${path}" ${arg} | grep "^LIB=" | sed -E "s/^LIB=//g")
-      export LIBPATH=$(./getenv.bat "${path}" ${arg} | grep "^LIBPATH=" | sed -E "s/^LIBPATH=//g")
-      export INCLUDE=$(./getenv.bat "${path}" ${arg} | grep "^INCLUDE=" | sed -E "s/^INCLUDE=//g")
-      ADDPATH=$(./getenv.bat "${path}" ${arg} | grep "^PATH=" | sed -E "s/^PATH=//g")
+      export LIB=$( echo "${vcenv}" | grep "^LIB=" | sed -E "s/^LIB=//g")
+      export LIBPATH=$( echo "${vcenv}" | grep "^LIBPATH=" | sed -E "s/^LIBPATH=//g")
+      export INCLUDE=$( echo "${vcenv}" | grep "^INCLUDE=" | sed -E "s/^INCLUDE=//g")
+      ADDPATH=$( echo "${vcenv}" | grep "^PATH=" | sed -E "s/^PATH=//g")
+      for var in LIB LIBPATH INCLUDE ADDPATH
+      do
+        echo "${var}: ${!var}"
+      done
       export PATH="${ADDPATH}:${PATH}"
       break
+    else
+      echo "VCINSTALLDIR not found. Complete environment:"
+      echo "${vcenv}"
     fi
   fi
 done < ...

...= 7)) || \
     (defined(__APPLE__) && (__apple_build_version__ >= 10010043))
+// This can be removed when the minimum required CMake version is >= 3.11
 #pragma clang diagnostic push
 #pragma clang diagnostic ignored "-Wself-assign-overloaded"
 #endif
diff --git a/src/test/jtx/impl/Env.cpp b/src/test/jtx/impl/Env.cpp
index 6f0f9e3fc73..fec9262f6f7 100644
--- a/src/test/jtx/impl/Env.cpp
+++ b/src/test/jtx/impl/Env.cpp
@@ -117,6 +117,25 @@ Env::closed()
     return app().getLedgerMaster().getClosedLedger();
 }
 
+std::string
+errorMessage(Json::Value const& resp)
+{
+    if (resp.isMember("error_what") && !resp["error_what"].isNull())
+        return resp["error_what"].asString();
+    else if (
+        resp.isMember(jss::error_exception) &&
+        !resp[jss::error_exception].isNull())
+        return resp[jss::error_exception].asString();
+    else if (
+        resp.isMember(jss::error_message) && !resp[jss::error_message].isNull())
+        return resp[jss::error_message].asString();
+    else if (resp.isMember(jss::error) && !resp[jss::error].isNull())
+        return resp[jss::error].asString();
+    else if (resp.isMember(jss::result) && resp[jss::result].isObject())
+        return errorMessage(resp[jss::result]);
+    return "internal error";
+}
+
 bool
 Env::close(
     NetClock::time_point closeTime,
@@ -136,15 +155,8 @@ Env::close(
         auto resp = rpc("ledger_accept");
         if (resp["result"]["status"] != std::string("success"))
         {
-            std::string reason = "internal error";
-            if (resp.isMember("error_what"))
-                reason = resp["error_what"].asString();
-            else if (resp.isMember("error_message"))
-                reason = resp["error_message"].asString();
-            else if (resp.isMember("error"))
-                reason = resp["error"].asString();
-
-            JLOG(journal.error()) << "Env::close() failed: " << reason;
+            JLOG(journal.error())
+                << "Env::close() failed: " << errorMessage(resp);
             res = false;
         }
     }
diff --git a/src/test/jtx/impl/envconfig.cpp b/src/test/jtx/impl/envconfig.cpp
index c9788a6d70f..14aace9e345 100644
--- a/src/test/jtx/impl/envconfig.cpp
+++ b/src/test/jtx/impl/envconfig.cpp
@@ -97,8 +97,8 @@ secure_gateway(std::unique_ptr<Config> cfg)
 std::unique_ptr<Config>
 admin_localnet(std::unique_ptr<Config> cfg)
 {
-    (*cfg)[PORT_RPC].set("admin", "127.0.0.0/8");
-    (*cfg)[PORT_WS].set("admin", "127.0.0.0/8");
+    (*cfg)[PORT_RPC].set("admin", "127.0.0.0/8,[::1]");
+    (*cfg)[PORT_WS].set("admin", "127.0.0.0/8,[::1]");
     return cfg;
 }
 
@@ -107,8 +107,8 @@ secure_gateway_localnet(std::unique_ptr<Config> cfg)
 {
     (*cfg)[PORT_RPC].set("admin", "");
     (*cfg)[PORT_WS].set("admin", "");
-    (*cfg)[PORT_RPC].set("secure_gateway", "127.0.0.0/8");
-    (*cfg)[PORT_WS].set("secure_gateway", "127.0.0.0/8");
+    (*cfg)[PORT_RPC].set("secure_gateway", "127.0.0.0/8,[::1]");
+    (*cfg)[PORT_WS].set("secure_gateway", "127.0.0.0/8,[::1]");
     return cfg;
 }
diff --git a/src/test/jtx/impl/flags.cpp b/src/test/jtx/impl/flags.cpp
index 992e1a88bb2..71febc412c4 100644
--- a/src/test/jtx/impl/flags.cpp
+++ b/src/test/jtx/impl/flags.cpp
@@ -51,7 +51,9 @@ void
 nflags::operator()(Env& env) const
 {
     auto const sle = env.le(account_);
-    if (sle->isFieldPresent(sfFlags))
+    if (!sle)
+        env.test.fail();
+    else if (sle->isFieldPresent(sfFlags))
         env.test.expect((sle->getFieldU32(sfFlags) & mask_) == 0);
     else
         env.test.pass();
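The new `errorMessage()` helper in `Env.cpp` checks the error fields in a fixed precedence order and recurses into `result`. For intuition, roughly the same lookup expressed with `jq` against a saved RPC reply (`response.json` is a stand-in file, and only one level of the `result` recursion is shown):

```bash
# Approximation of the errorMessage() fallback chain; the "?" suffixes
# suppress errors when a field is absent or "result" is not an object.
jq -r '.error_what? // .error_exception? // .error_message? // .error?
       // .result.error_what? // .result.error_exception?
       // .result.error_message? // .result.error?
       // "internal error"' response.json
```

The point of the ordering is that `error_what` and the new `error_exception` carry the most specific diagnostics, with `error_message` and `error` as progressively coarser fallbacks.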
diff --git a/src/test/overlay/reduce_relay_test.cpp b/src/test/overlay/reduce_relay_test.cpp
index 4d9086dab10..6a659b2aba0 100644
--- a/src/test/overlay/reduce_relay_test.cpp
+++ b/src/test/overlay/reduce_relay_test.cpp
@@ -1597,7 +1597,11 @@ class reduce_relay_simulate_test : public reduce_relay_test
 };
 
 BEAST_DEFINE_TESTSUITE(reduce_relay, ripple_data, ripple);
-BEAST_DEFINE_TESTSUITE_MANUAL(reduce_relay_simulate, ripple_data, ripple);
+BEAST_DEFINE_TESTSUITE_MANUAL_PRIO(
+    reduce_relay_simulate,
+    ripple_data,
+    ripple,
+    75);
 
 } // namespace test
diff --git a/src/test/rpc/AccountSet_test.cpp b/src/test/rpc/AccountSet_test.cpp
index e5475e3f530..3c6cad00e28 100644
--- a/src/test/rpc/AccountSet_test.cpp
+++ b/src/test/rpc/AccountSet_test.cpp
@@ -41,7 +41,7 @@ class AccountSet_test : public beast::unit_test::suite
         env.fund(XRP(10000), noripple(alice));
         // ask for the ledger entry - account root, to check its flags
         auto const jrr = env.le(alice);
-        BEAST_EXPECT((*env.le(alice))[sfFlags] == 0u);
+        BEAST_EXPECT(jrr && jrr->at(sfFlags) == 0u);
     }
 
     void
diff --git a/src/xrpld/app/main/Main.cpp b/src/xrpld/app/main/Main.cpp
index 799911f63dd..6d2bf21c6d1 100644
--- a/src/xrpld/app/main/Main.cpp
+++ b/src/xrpld/app/main/Main.cpp
@@ -877,7 +877,5 @@ main(int argc, char** argv)
     }
 #endif
 
-    atexit(&google::protobuf::ShutdownProtobufLibrary);
-
     return ripple::run(argc, argv);
 }
diff --git a/src/xrpld/net/detail/RPCCall.cpp b/src/xrpld/net/detail/RPCCall.cpp
index 997d6463f23..9cf596b471b 100644
--- a/src/xrpld/net/detail/RPCCall.cpp
+++ b/src/xrpld/net/detail/RPCCall.cpp
@@ -1538,12 +1538,14 @@ rpcClient(
     {
         jvOutput = rpcError(rpcINVALID_PARAMS);
         jvOutput["error_what"] = e.what();
+        jvOutput[jss::error_exception] = e.what();
         nRet = rpcINVALID_PARAMS;
     }
     catch (std::exception& e)
     {
         jvOutput = rpcError(rpcINTERNAL);
        jvOutput["error_what"] = e.what();
+        jvOutput[jss::error_exception] = e.what();
         nRet = rpcINTERNAL;
     }
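A possible invocation of the new `manual-tests.sh` helper (the build path below is an assumption, not taken from the patch): pointed at a rippled binary, it derives the manual-test list from `--unittest=beast.unit_test.print`; with no argument, it falls back to the hard-coded list.

```bash
# With a built rippled (path assumed): query the binary for manual tests,
# minus the excluded troublemakers, as a comma-joined list.
bin/ci/ubuntu/manual-tests.sh build/gcc.debug/rippled

# Without arguments: print the static fallback list in the same format.
bin/ci/ubuntu/manual-tests.sh
```

Keeping both modes in one script lets `build-and-test.sh` use the live list when a binary exists, while documentation and dry runs still have a stable reference list.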