forked from modflowpy/flopy
-
Notifications
You must be signed in to change notification settings - Fork 0
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
* workaround intermittent macos CI matplotlib failures (modflowpy#1491) * don't use plt.show() in tests * add explanatory comments to conftest.py * skip ex-gwtgwt-mt3dms-p10 mf6 example test (MODFLOW-USGS/modflow6#1008) * give test_mt3d.py::test_mfnwt_CrnkNic more retries * rename release/ to scripts/ * move pull_request_prepare.py to scripts/ * separate CI workflows for benchmarks, examples and regression tests * name benchmark artifacts benchmarks-<system>-<python version>-<workflow run ID> * add postprocess_benchmarks.py to scripts/ * add benchmark postprocessing CI job (creates artifact benchmarks-<workflow run ID>) * cache modflow executables (per MODFLOW-USGS/executables#7)
- Loading branch information
Showing
22 changed files
with
864 additions
and
666 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,249 @@ | ||
name: FloPy benchmarks

# Run benchmarks on pushes/PRs to the main development branches,
# plus a nightly scheduled run.
on:
  push:
    branches:
      - tests
      - develop
  pull_request:
    branches:
      - tests
      - develop
  schedule:
    - cron: '0 8 * * *' # run at 8 AM UTC (12 am PST)
jobs:
  # Linux/macOS benchmark matrix.
  benchmark:
    name: Benchmarks
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        os: [ ubuntu-latest, macos-latest ]
        # versions quoted so YAML does not read them as floats ("3.10" -> 3.1)
        python-version: [ "3.7", "3.8", "3.9", "3.10" ]
        exclude:
          # avoid shutil.copytree infinite recursion bug
          # https://github.com/python/cpython/pull/17098
          # NOTE(review): "3.8.0" is not one of the matrix values above, so this
          # exclude never matches; setup-python resolves "3.8" to the latest
          # 3.8.x anyway — confirm whether this entry can be dropped
          - python-version: "3.8.0"
        include:
          # per-OS pip cache location, consumed by the "Cache Python" step
          - os: ubuntu-latest
            path: ~/.cache/pip
          - os: macos-latest
            path: ~/Library/Caches/pip
    defaults:
      run:
        shell: bash
    timeout-minutes: 90

    steps:
      - name: Checkout repo
        uses: actions/[email protected]

      - name: Cache Python
        uses: actions/cache@v3
        with:
          path: ${{ matrix.path }}
          key: ${{ runner.os }}-${{ matrix.python-version }}-pip-${{ hashFiles('setup.cfg') }}
          restore-keys: |
            ${{ runner.os }}-${{ matrix.python-version }}-pip-

      - name: Setup Python
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}

      - name: Get branch name
        uses: nelonoel/[email protected]

      - name: Install Python dependencies
        run: |
          python -m pip install --upgrade pip
          pip install .
          pip install ".[test, optional]"

      - name: Cache Modflow executables
        uses: actions/cache@v3
        with:
          # "~" instead of "$HOME": actions/cache expands "~" but does not
          # interpolate shell environment variables in "path"
          path: ~/.local/bin
          # restore-keys identical to "key" removed — an exact-key match is
          # always attempted first, so it was redundant
          key: modflow-exes-${{ matrix.os }}

      - name: Install Modflow executables
        working-directory: ./autotest
        run: |
          mkdir -p "$HOME/.local/bin"
          get-modflow "$HOME/.local/bin"
          echo "$HOME/.local/bin" >> $GITHUB_PATH
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Run benchmarks
        working-directory: ./autotest
        run: |
          mkdir -p .benchmarks
          pytest -v --durations=0 --benchmark-only --benchmark-json .benchmarks/${{ matrix.os }}_python${{ matrix.python-version }}.json --keep-failed=.failed
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Upload failed benchmark artifact
        # v2 -> v3 for consistency with actions/download-artifact@v3 used below
        uses: actions/upload-artifact@v3
        if: failure()
        with:
          name: failed-${{ matrix.os }}-${{ matrix.python-version }}-${{ github.run_id }}
          path: |
            ./autotest/.failed/**

      - name: Upload benchmark result artifact
        uses: actions/upload-artifact@v3
        with:
          name: benchmarks-${{ matrix.os }}-${{ matrix.python-version }}-${{ github.run_id }}
          path: |
            ./autotest/.benchmarks/**/*.json
benchmark_windows: | ||
name: Benchmarks (Windows) | ||
runs-on: windows-latest | ||
strategy: | ||
fail-fast: false | ||
matrix: | ||
python-version: [ 3.7, 3.8, 3.9, "3.10" ] | ||
exclude: | ||
# avoid shutil.copytree infinite recursion bug | ||
# https://github.com/python/cpython/pull/17098 | ||
- python-version: '3.8.0' | ||
defaults: | ||
run: | ||
shell: pwsh | ||
timeout-minutes: 90 | ||
|
||
steps: | ||
- name: Checkout repo | ||
uses: actions/[email protected] | ||
|
||
- name: Get branch name | ||
uses: nelonoel/[email protected] | ||
|
||
- name: Cache Miniconda | ||
uses: actions/cache@v3 | ||
with: | ||
path: ~/conda_pkgs_dir | ||
key: ${{ runner.os }}-${{ matrix.python-version }}-${{ matrix.run-type }}-${{ hashFiles('etc/environment.yml') }} | ||
|
||
# Standard python fails on windows without GDAL installation | ||
# Using custom bash shell ("shell: bash -l {0}") with Miniconda | ||
- name: Setup Miniconda | ||
uses: conda-incubator/[email protected] | ||
with: | ||
python-version: ${{ matrix.python-version }} | ||
channels: conda-forge | ||
auto-update-conda: true | ||
activate-environment: flopy | ||
use-only-tar-bz2: true | ||
|
||
- name: Install Python dependencies | ||
run: | | ||
conda env update --name flopy --file etc/environment.yml | ||
python -m pip install --upgrade pip | ||
pip install https://github.com/modflowpy/pymake/zipball/master | ||
pip install xmipy | ||
pip install . | ||
- name: Cache Modflow executables | ||
uses: actions/cache@v3 | ||
with: | ||
path: C:\Users\runneradmin\.local\bin | ||
key: modflow-exes-${{ runner.os }} | ||
restore-keys: | | ||
modflow-exes-${{ runner.os }} | ||
- name: Install Modflow executables | ||
run: | | ||
md -Force C:\Users\runneradmin\.local\bin | ||
get-modflow "C:\Users\runneradmin\.local\bin" | ||
echo "C:\Users\runneradmin\.local\bin" | Out-File -FilePath $env:GITHUB_PATH -Encoding utf8 -Append | ||
env: | ||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | ||
|
||
- name: Run benchmarks | ||
working-directory: ./autotest | ||
run: | | ||
md -Force .benchmarks | ||
pytest -v --durations=0 --benchmark-only --benchmark-json .benchmarks/${{ runner.os }}_python${{ matrix.python-version }}.json --keep-failed=.failed | ||
env: | ||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} | ||
|
||
- name: Upload failed benchmark artifact | ||
uses: actions/upload-artifact@v2 | ||
if: failure() | ||
with: | ||
name: failed-${{ runner.os }}-${{ matrix.python-version }}-${{ github.run_id }} | ||
path: | | ||
./autotest/.failed/** | ||
- name: Upload benchmark result artifact | ||
uses: actions/upload-artifact@v2 | ||
with: | ||
name: benchmarks-${{ runner.os }}-${{ matrix.python-version }}-${{ github.run_id }} | ||
path: | | ||
./autotest/.benchmarks/**/*.json | ||
post_benchmark: | ||
needs: | ||
- benchmark | ||
- benchmark_windows | ||
name: Postprocess benchmark results | ||
runs-on: ubuntu-latest | ||
defaults: | ||
run: | ||
shell: bash | ||
timeout-minutes: 10 | ||
|
||
steps: | ||
- name: Checkout repo | ||
uses: actions/[email protected] | ||
|
||
- name: Cache Python | ||
uses: actions/cache@v3 | ||
with: | ||
path: ~/.cache/pip | ||
key: ${{ runner.os }}-3.7-pip-${{ hashFiles('setup.cfg') }} | ||
restore-keys: | | ||
${{ runner.os }}-3.7-pip- | ||
- name: Setup Python | ||
uses: actions/setup-python@v4 | ||
with: | ||
python-version: 3.7 | ||
|
||
- name: Install Python dependencies | ||
run: | | ||
python -m pip install --upgrade pip | ||
pip install numpy pandas matplotlib seaborn | ||
- name: Download all artifacts | ||
uses: actions/download-artifact@v3 | ||
with: | ||
path: ./autotest/.benchmarks | ||
|
||
- name: Process benchmark results | ||
run: | | ||
gh api -X GET -H "Accept: application/vnd.github+json" /repos/modflowpy/flopy/actions/artifacts >> artifacts.json | ||
cat artifacts.json | ||
python ./scripts/get_benchmark_artifact_ids.py artifacts.json | xargs -I@ bash -c "gh api -H 'Accept: application/vnd.github+json' /repos/modflowpy/flopy/actions/artifacts/@/zip >> ./autotest/.benchmarks/@.zip" | ||
zipfiles=( ./autotest/.benchmarks/*.zip ) | ||
if (( ${#zipfiles[@]} )); then | ||
unzip -o './autotest/.benchmarks/*.zip' -d ./autotest/.benchmarks | ||
fi | ||
python ./scripts/postprocess_benchmarks.py ./autotest/.benchmarks ./autotest/.benchmarks | ||
env: | ||
ARTIFACTS: ${{steps.run_tests.outputs.artifact_ids}} | ||
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} | ||
|
||
- name: Upload benchmark results | ||
uses: actions/upload-artifact@v2 | ||
with: | ||
name: benchmarks-${{ github.run_id }} | ||
path: | | ||
./autotest/.benchmarks/*.csv | ||
./autotest/.benchmarks/*.png |
Oops, something went wrong.