Add comment in full_multisolve.py about ROMs installation #1720
Workflow file for this run

name: 🔨 Build scikit-decide
on:
push:
branches:
- "**"
pull_request:
workflow_dispatch:
schedule:
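    # cron fields: minute hour day-of-month month day-of-week → daily at 01:45 UTC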
- cron: '45 1 * * *'
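# cancel any in-flight run on the same workflow and git ref when a new one starts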
concurrency:
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
BOOST_DIR: 3rdparty/boost
BOOST_VERSION: "1.76.0"
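  # skip rebuilding native dependencies by default; reset to 0 below when the build-dependencies cache misses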
SKDECIDE_SKIP_DEPS: 1
jobs:
setup:
runs-on: ubuntu-latest
outputs:
python_version: ${{ steps.generate-matrix.outputs.python_version }}
python_version_per_os: ${{ steps.generate-matrix.outputs.python_version_per_os }}
      build: ${{ steps.generate-matrix.outputs.build }}
      test: ${{ steps.generate-matrix.outputs.test }}
      do_macos: ${{ steps.generate-matrix.outputs.do_macos }}
      do_ubuntu: ${{ steps.generate-matrix.outputs.do_ubuntu }}
      do_windows: ${{ steps.generate-matrix.outputs.do_windows }}
      build_doc: ${{ steps.generate-matrix.outputs.build_doc }}
steps:
- uses: actions/setup-python@v4
with:
python-version: 3.8
- name: Generate Matrix
id: generate-matrix
shell: python3 {0}
run: |
from os import environ
python_version = ["3.8", "3.9", "3.10", "3.11"]
build = [ "macos-latest", "ubuntu-latest", "windows-latest" ]
test = [ "macos-11", "macos-12", "ubuntu-22.04", "ubuntu-20.04", "windows-2019", "windows-2022"]
build_doc = "true"
if "${{ github.event_name }}" != "schedule":
            to_bool = lambda s: s == "true"
python_filter = {
'3.11' : to_bool("${{ contains(github.event.head_commit.message, '[ci: python-3.11]') }}"),
'3.8' : to_bool("${{ contains(github.event.head_commit.message, '[ci: python-3.8]') }}"),
'3.9' : to_bool("${{ contains(github.event.head_commit.message, '[ci: python-3.9]') }}"),
'3.10' : to_bool("${{ contains(github.event.head_commit.message, '[ci: python-3.10]') }}"),
}
if any(python_filter.values()):
python_version = [v for v in python_version if python_filter[v]]
os_filter = {
'macos-11' : to_bool("${{ contains(github.event.head_commit.message, '[ci: macos-11]') }}"),
'macos-12' : to_bool("${{ contains(github.event.head_commit.message, '[ci: macos-12]') }}"),
'ubuntu-22.04' : to_bool("${{ contains(github.event.head_commit.message, '[ci: ubuntu-22.04]') }}"),
'ubuntu-20.04' : to_bool("${{ contains(github.event.head_commit.message, '[ci: ubuntu-20.04]') }}"),
'windows-2019' : to_bool("${{ contains(github.event.head_commit.message, '[ci: windows-2019]') }}"),
'windows-2022' : to_bool("${{ contains(github.event.head_commit.message, '[ci: windows-2022]') }}"),
}
if set(os_filter.keys()) != set(test):
raise Exception("test and os_filter do not contain the same keys")
if "${{ contains(github.event.head_commit.message, '[ci: windows]') }}" == "true":
os_filter.update({k: True for k in os_filter if k.startswith("windows")})
if "${{ contains(github.event.head_commit.message, '[ci: macos]') }}" == "true":
os_filter.update({k: True for k in os_filter if k.startswith("macos")})
if "${{ contains(github.event.head_commit.message, '[ci: ubuntu]') }}" == "true":
os_filter.update({k: True for k in os_filter if k.startswith("ubuntu")})
# If there is no keyword, proceed as if all were present
if not any(os_filter.values()):
os_filter.update({k: True for k in os_filter})
test = [ v for v in test if os_filter[v]]
test_os = { v.split('-')[0] for v in test }
build = [ v for v in build if v.split('-')[0] in test_os ]
if "${{ contains(github.event.head_commit.message, '[ci: skip-doc]') }}" == "true" or "ubuntu-latest" not in build:
build_doc = "false"
oses = ["macos", "ubuntu", "windows"]
build_dict = {os : [k for k in build if k.startswith(os)] for os in oses}
python_version_per_os = {os: python_version for os in oses}
          # remove python 3.11 for windows: a pyarrow dependency conflict prevents testing the windows python 3.11 wheel
python_version_per_os["windows"] = [v for v in python_version if v != "3.11"]
# update build_dict by removing os without any python version
for os in build_dict:
if len(python_version_per_os[os]) == 0:
build_dict[os] = []
with open(environ["GITHUB_OUTPUT"], "a") as f:
f.write(f"build={build_dict}\n")
f.write(f"test={dict({os : [k for k in test if k.startswith(os)] for os in oses})}\n")
f.write(f"build_doc={build_doc}\n")
for os in oses:
f.write(f"do_{os}={'true' if len(build_dict[os]) > 0 else 'false'}\n")
f.write(f"python_version={python_version}\n")
f.write(f"python_version_per_os={python_version_per_os}\n")
lint-sources:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: "3.8"
- name: install pre-commit
run: python -m pip install pre-commit
- name: get cached pre-commit hooks
uses: actions/cache@v3
with:
path: ~/.cache/pre-commit
key: pre-commit|${{ env.pythonLocation }}|${{ hashFiles('.pre-commit-config.yaml') }}
- name: pre-commit checks
run: pre-commit run --show-diff-on-failure --color=always --all-files
build-windows:
needs: [setup]
if: needs.setup.outputs.do_windows == 'true'
strategy:
matrix:
os: ${{ fromJSON(needs.setup.outputs.build).windows }}
python-version: ${{ fromJSON(needs.setup.outputs.python_version_per_os).windows }}
fail-fast: false
defaults:
run:
shell: bash
runs-on: ${{ matrix.os }}
steps:
- name: Checkout scikit-decide source code
uses: actions/checkout@v3
with:
submodules: true
fetch-depth: 0
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Load cached venv
id: cached-pip-wheels
uses: actions/cache@v3
with:
path: ~/.cache
key: venv-${{ runner.os }}-${{ hashFiles('**/poetry.lock') }}
- name: Restore Boost cache
uses: actions/cache@v3
id: cache-boost
with:
path: ${{env.BOOST_DIR}}
key: BOOST_${{env.BOOST_VERSION}}
- name: Install Boost
if: steps.cache-boost.outputs.cache-hit != 'true'
run: |
mkdir -p $BOOST_DIR
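          # ${BOOST_VERSION//./_} is bash pattern substitution ("1.76.0" → "1_76_0");
          # --strip-components=1 keeps only the boost/ headers under $BOOST_DIR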
curl --silent --location --output - \
https://boostorg.jfrog.io/artifactory/main/release/$BOOST_VERSION/source/boost_${BOOST_VERSION//./_}.tar.bz2 |\
tar jxf - -C $BOOST_DIR --strip-components=1 boost_${BOOST_VERSION//./_}/boost
shell: bash
- name: Restore build dependencies
id: cache-build-dependencies
uses: actions/cache@v3
with:
path: |
skdecide/hub/bin
skdecide/hub/share
skdecide/hub/*.msc
key: ${{ runner.os }}-cache-deps
- name: Update SKDECIDE_SKIP_DEPS
if: steps.cache-build-dependencies.outputs.cache-hit != 'true'
run: echo "SKDECIDE_SKIP_DEPS=0" >> $GITHUB_ENV
- name: Build wheel
run: |
export "Boost_ROOT=$PWD/$BOOST_DIR"
python -m pip install --upgrade pip
pip install build poetry-dynamic-versioning
python -m build --sdist --wheel
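      # a wheel is a zip archive: extracting it refreshes skdecide/hub/* so the deps cache saved by this job contains the new binaries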
- name: Update build cache from wheels
if: steps.cache-build-dependencies.outputs.cache-hit != 'true'
run: 7z x dist/*.whl -y
- name: Upload as build artifacts
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist/*.whl
build-macos:
needs: [setup]
if: needs.setup.outputs.do_macos == 'true'
strategy:
matrix:
arch: ["arm64", "x86_64"] # NB: only x86_64 wheel will be tested as no macosx_arm64 github runner available
os: ${{ fromJSON(needs.setup.outputs.build).macos }}
python-version: ${{ fromJSON(needs.setup.outputs.python_version_per_os).macos }}
fail-fast: false
defaults:
run:
shell: bash
runs-on: ${{ matrix.os }}
steps:
- name: Checkout scikit-decide source code
uses: actions/checkout@v3
with:
submodules: true
fetch-depth: 0
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Load cached venv
id: cached-pip-wheels
uses: actions/cache@v3
with:
path: ~/.cache
key: venv-${{ runner.os }}-${{ hashFiles('**/poetry.lock') }}
- name: Restore Boost cache
uses: actions/cache@v3
id: cache-boost
with:
path: ${{env.BOOST_DIR}}
key: BOOST_${{env.BOOST_VERSION}}
- name: Install Boost
if: steps.cache-boost.outputs.cache-hit != 'true'
run: |
mkdir -p $BOOST_DIR
curl --silent --location --output - \
https://boostorg.jfrog.io/artifactory/main/release/$BOOST_VERSION/source/boost_${BOOST_VERSION//./_}.tar.bz2 |\
tar jxf - -C $BOOST_DIR --strip-components=1 boost_${BOOST_VERSION//./_}/boost
shell: bash
- name: Restore build dependencies
id: cache-build-dependencies
uses: actions/cache@v3
with:
path: |
skdecide/hub/bin
skdecide/hub/share
skdecide/hub/*.msc
key: ${{ runner.os }}-cache-deps
- name: Update SKDECIDE_SKIP_DEPS
if: steps.cache-build-dependencies.outputs.cache-hit != 'true'
run: echo "SKDECIDE_SKIP_DEPS=0" >> $GITHUB_ENV
- name: Install and restore ccache
uses: hendrikmuhs/[email protected]
with:
key: ${{ runner.os }}-py${{ matrix.python-version }}
max-size: 80M
- name: Let cmake use ccache
run: |
echo "CMAKE_CXX_COMPILER_LAUNCHER=ccache" >> ${GITHUB_ENV}
echo "CMAKE_C_COMPILER_LAUNCHER=ccache" >> ${GITHUB_ENV}
- name: Build wheel
env:
ARCH: ${{ matrix.arch }}
PYTHON_VERSION: ${{ matrix.python-version }}
run: |
if [[ "$ARCH" == arm64 ]]; then
# SciPy requires 12.0 on arm to prevent kernel panics
# https://github.com/scipy/scipy/issues/14688
# We use the same deployment target to match SciPy.
export MACOSX_DEPLOYMENT_TARGET=12.0
OPENMP_URL="https://anaconda.org/conda-forge/llvm-openmp/11.1.0/download/osx-arm64/llvm-openmp-11.1.0-hf3c4609_1.tar.bz2"
else
export MACOSX_DEPLOYMENT_TARGET=10.15
OPENMP_URL="https://anaconda.org/conda-forge/llvm-openmp/11.1.0/download/osx-64/llvm-openmp-11.1.0-hda6cdc1_1.tar.bz2"
fi
PYTHON_VERSION_WO_DOT=$(echo ${PYTHON_VERSION} | sed -e 's/\.//g') # remove "."
MACOSX_DEPLOYMENT_TARGET_WO_DOT=$(echo ${MACOSX_DEPLOYMENT_TARGET} | sed -e 's/\./_/g') # replace "." by "_"
# install appropriate version of openmp
sudo conda create -n build $OPENMP_URL
# make openmp and boost available
export Boost_ROOT=$PWD/$BOOST_DIR
export OpenMP_ROOT=$CONDA/envs/build
export CPPFLAGS="$CPPFLAGS -Xpreprocessor -fopenmp"
export CFLAGS="$CFLAGS -I$OpenMP_ROOT/include"
export CXXFLAGS="$CXXFLAGS -I$OpenMP_ROOT/include"
export LDFLAGS="$LDFLAGS -Wl,-rpath,$OpenMP_ROOT/lib -L$OpenMP_ROOT/lib -lomp"
# cmake flag to cross-compile the c++
export CMAKE_OSX_ARCHITECTURES=${ARCH}
python -m pip install cibuildwheel
# cibuildwheel flags
export CIBW_BUILD_FRONTEND="build"
export CIBW_ARCHS=${ARCH}
export CIBW_BUILD="cp${PYTHON_VERSION_WO_DOT}-macosx_${ARCH}"
# build wheel
python -m cibuildwheel --output-dir wheelhouse
          # set the proper platform tag
          # - with poetry build + cross-compilation for arm64, the tag can still be x86_64 (https://cibuildwheel.readthedocs.io/en/stable/faq/#how-to-cross-compile)
          # - we downgrade the displayed macosx version to ensure compatibility with older macosx versions than the one used on this runner
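          # e.g. (hypothetical filename) scikit_decide-1.0-cp310-cp310-macosx_13_0_arm64.whl
          # becomes scikit_decide-1.0-cp310-cp310-macosx_12_0_arm64.whl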
pip install "wheel>=0.40"
wheel tags --platform-tag macosx_${MACOSX_DEPLOYMENT_TARGET_WO_DOT}_${ARCH} --remove wheelhouse/*.whl
- name: Update build cache from wheels
if: steps.cache-build-dependencies.outputs.cache-hit != 'true'
run: 7z x wheelhouse/*.whl -y
- name: Upload as build artifacts
uses: actions/upload-artifact@v3
with:
name: wheels
path: wheelhouse/*.whl
build-ubuntu:
needs: [setup]
if: needs.setup.outputs.do_ubuntu == 'true'
strategy:
matrix:
os: ${{ fromJSON(needs.setup.outputs.build).ubuntu }}
python-version: ${{ fromJSON(needs.setup.outputs.python_version_per_os).ubuntu }}
fail-fast: false
defaults:
run:
shell: bash
runs-on: ${{ matrix.os }}
steps:
- name: Checkout scikit-decide source code
uses: actions/checkout@v3
with:
submodules: true
fetch-depth: 0
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Load cached venv
id: cached-pip-wheels
uses: actions/cache@v3
with:
path: ~/.cache
key: venv-${{ runner.os }}-${{ hashFiles('**/poetry.lock') }}
- name: Restore Boost cache
uses: actions/cache@v3
id: cache-boost
with:
path: ${{env.BOOST_DIR}}
key: BOOST_${{env.BOOST_VERSION}}
- name: Install Boost
if: steps.cache-boost.outputs.cache-hit != 'true'
run: |
mkdir -p $BOOST_DIR
curl --silent --location --output - \
https://boostorg.jfrog.io/artifactory/main/release/$BOOST_VERSION/source/boost_${BOOST_VERSION//./_}.tar.bz2 |\
tar jxf - -C $BOOST_DIR --strip-components=1 boost_${BOOST_VERSION//./_}/boost
shell: bash
- name: Restore build dependencies
id: cache-build-dependencies
uses: actions/cache@v3
with:
path: |
skdecide/hub/bin
skdecide/hub/share
skdecide/hub/*.msc
key: ${{ runner.os }}-cache-deps
- name: Update SKDECIDE_SKIP_DEPS
if: steps.cache-build-dependencies.outputs.cache-hit != 'true'
run: echo "SKDECIDE_SKIP_DEPS=0" >> $GITHUB_ENV
- name: Restore docker dev image
id: cache-dev-deps
uses: actions/cache@v3
with:
path: /tmp/docker
key: dev-deps-${{ runner.os }}-${{ hashFiles('scripts/build-skdecide_dev.sh', 'scripts/Dockerfile_x86_64_dev') }}
- name: Restore ccache cache
id: ccache-restore
uses: actions/cache@v3
with:
path: .ccache
key: ccache-${{ runner.os }}-py${{ matrix.python-version }}-${{ github.run_id }}-${{github.run_number}}
restore-keys: ccache-${{ runner.os }}-py${{ matrix.python-version }}
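        # the key is unique per run so an updated cache is always saved; restore-keys falls back to the latest matching cache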
- name: Build wheels
run: |
# Load skdecide_dev image from cache, or build it if not found
if test -f /tmp/docker/skdecide_dev.tar; then
docker image load -i /tmp/docker/skdecide_dev.tar
else
docker build -f scripts/Dockerfile_x86_64_dev -t skdecide_dev .
mkdir -p /tmp/docker
docker image save -o /tmp/docker/skdecide_dev.tar skdecide_dev
fi
          # The existence of the .ccache directory triggers ccache use in builds-manylinux-wheels.sh
test -d .ccache || mkdir .ccache
docker build -f scripts/Dockerfile_x86_64 -t skdecide_x86_64 --build-arg PYTHON_VERSION=${{matrix.python-version}} --build-arg SKDECIDE_SKIP_DEPS=${SKDECIDE_SKIP_DEPS} --build-arg BOOST_DIR=${BOOST_DIR} .
# Fetch wheels from Docker
docker run --rm -v $PWD:/mytmp skdecide_x86_64 cp -r /io/dist /mytmp
# Fetch ccache from Docker
docker run --rm -v $PWD:/mytmp skdecide_x86_64 cp -r /io/.ccache /mytmp
- name: Update build cache from wheels
if: steps.cache-build-dependencies.outputs.cache-hit != 'true'
run: 7z x dist/*.whl -y
- name: Upload as build artifacts
uses: actions/upload-artifact@v3
with:
name: wheels
path: dist/*.whl
test-windows:
needs: [build-windows, setup]
strategy:
matrix:
os: ${{ fromJSON(needs.setup.outputs.test).windows }}
python-version: ${{ fromJSON(needs.setup.outputs.python_version_per_os).windows }}
compiler: [gnu]
fail-fast: true
runs-on: ${{ matrix.os }}
defaults:
run:
shell: bash
env:
minizinc_config_cmdline: export PATH=$PATH:~/AppData/Local/Programs/MiniZinc
minizinc_cache_path: ~/AppData/Local/Programs/MiniZinc
minizinc_url: https://github.com/MiniZinc/MiniZincIDE/releases/download/2.6.3/MiniZincIDE-2.6.3-bundled-setup-win64.exe
minizinc_downloaded_filepath: minizinc_setup.exe
minizinc_install_cmdline: cmd //c "minizinc_setup.exe /verysilent /currentuser /norestart /suppressmsgboxes /sp"
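    # NB: "cmd //c" uses a double slash so git-bash does not rewrite cmd's /c flag as a POSIX path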
steps:
- uses: actions/checkout@v3
with:
submodules: true
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Download artifacts
uses: actions/download-artifact@v3
with:
name: wheels
path: wheels
      - name: get MiniZinc path to cache
id: get-mzn-cache-path
run: |
echo "path=${{ env.minizinc_cache_path }}" >> $GITHUB_OUTPUT # expands variables
- name: Restore MiniZinc cache
id: cache-minizinc
uses: actions/cache@v3
with:
path: ${{ steps.get-mzn-cache-path.outputs.path }}
key: ${{ env.minizinc_url }}
- name: Download MiniZinc
if: steps.cache-minizinc.outputs.cache-hit != 'true'
run: |
curl -o "${{ env.minizinc_downloaded_filepath }}" -L ${{ env.minizinc_url }}
- name: Install MiniZinc
if: steps.cache-minizinc.outputs.cache-hit != 'true'
run: |
${{ env.minizinc_install_cmdline }}
- name: Test minizinc install
run: |
${{ env.minizinc_config_cmdline }}
minizinc --version
- name: Install scikit-decide and test dependencies
run: |
python_version=${{ matrix.python-version }}
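          # ${python_version/\./} drops the dot ("3.10" → "310") to match the wheel's cpXY tag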
wheelfile=$(ls ./wheels/scikit_decide*-cp${python_version/\./}-*win*.whl)
pip install ${wheelfile}[all] pytest gymnasium[classic-control]
- name: Test with pytest
run: |
# configure minizinc
${{ env.minizinc_config_cmdline }}
# test minizinc
python -c "import minizinc; print(minizinc.default_driver.minizinc_version); minizinc.Solver.lookup('gecode')"
# run pytest
          # we split the tests between
          # - the c++ scikit-decide library
          # - ortools (scheduling)
          # - deep-learning solvers (solvers/python)
          # - the others
          # to avoid openmp version conflicts
pytest -v -s tests/*/cpp
pytest -v -s tests/solvers/python
pytest -v -s tests/scheduling
pytest -v -s --ignore-glob tests/*/cpp --ignore tests/solvers/python --ignore tests/scheduling
test-macos:
needs: [build-macos, setup]
strategy:
matrix:
os: ${{ fromJSON(needs.setup.outputs.test).macos }}
python-version: ${{ fromJSON(needs.setup.outputs.python_version_per_os).macos }}
fail-fast: true
runs-on: ${{ matrix.os }}
env:
minizinc_config_cmdline: export PATH=$PATH:$(pwd)/bin/MiniZincIDE.app/Contents/Resources
minizinc_cache_path: $(pwd)/bin/MiniZincIDE.app
minizinc_url: https://github.com/MiniZinc/MiniZincIDE/releases/download/2.6.3/MiniZincIDE-2.6.3-bundled.dmg
minizinc_downloaded_filepath: bin/minizinc.dmg
minizinc_install_cmdline: sudo hdiutil attach bin/minizinc.dmg; sudo cp -R /Volumes/MiniZinc*/MiniZincIDE.app bin/.
steps:
- uses: actions/checkout@v3
- name: Install libomp package
run: brew install libomp
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Download artifacts
uses: actions/download-artifact@v3
with:
name: wheels
path: wheels
- name: Create bin/
run: mkdir -p bin
      - name: get MiniZinc path to cache
id: get-mzn-cache-path
run: |
echo "path=${{ env.minizinc_cache_path }}" >> $GITHUB_OUTPUT # expands variables
- name: Restore MiniZinc cache
id: cache-minizinc
uses: actions/cache@v3
with:
path: ${{ steps.get-mzn-cache-path.outputs.path }}
key: ${{ env.minizinc_url }}
- name: Download MiniZinc
if: steps.cache-minizinc.outputs.cache-hit != 'true'
run: |
curl -o "${{ env.minizinc_downloaded_filepath }}" -L ${{ env.minizinc_url }}
- name: Install MiniZinc
if: steps.cache-minizinc.outputs.cache-hit != 'true'
run: |
${{ env.minizinc_install_cmdline }}
- name: Test minizinc install
run: |
${{ env.minizinc_config_cmdline }}
minizinc --version
- name: Install scikit-decide and test dependencies
run: |
python_version=${{ matrix.python-version }}
wheelfile=$(ls ./wheels/scikit_decide*-cp${python_version/\./}-*macos*x86_64.whl)
pip install ${wheelfile}[all] pytest gymnasium[classic-control]
- name: Test with pytest
run: |
# configure minizinc
${{ env.minizinc_config_cmdline }}
# test minizinc
python -c "import minizinc; print(minizinc.default_driver.minizinc_version); minizinc.Solver.lookup('gecode')"
# run pytest
          # we split the tests between
          # - the c++ scikit-decide library
          # - ortools (scheduling)
          # - deep-learning solvers (solvers/python)
          # - the others
          # to avoid openmp version conflicts
pytest -v -s tests/*/cpp
pytest -v -s tests/solvers/python
pytest -v -s tests/scheduling
pytest -v -s --ignore-glob tests/*/cpp --ignore tests/solvers/python --ignore tests/scheduling
test-ubuntu:
needs: [build-ubuntu, setup]
strategy:
matrix:
os: ${{ fromJSON(needs.setup.outputs.test).ubuntu }}
python-version: ${{ fromJSON(needs.setup.outputs.python_version_per_os).ubuntu }}
fail-fast: true
runs-on: ${{ matrix.os }}
env:
minizinc_config_cmdline: export PATH=$PATH:$(pwd)/bin/squashfs-root/usr/bin; export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:$(pwd)/bin/squashfs-root/usr/lib
minizinc_cache_path: $(pwd)/bin/squashfs-root
minizinc_url: https://github.com/MiniZinc/MiniZincIDE/releases/download/2.6.3/MiniZincIDE-2.6.3-x86_64.AppImage
minizinc_downloaded_filepath: bin/minizinc.AppImage
minizinc_install_cmdline: cd bin; sudo chmod +x minizinc.AppImage; sudo ./minizinc.AppImage --appimage-extract; cd ..
steps:
- uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Download artifacts
uses: actions/download-artifact@v3
with:
name: wheels
path: wheels
- name: Create bin/
run: mkdir -p bin
      - name: get MiniZinc path to cache
id: get-mzn-cache-path
run: |
echo "path=${{ env.minizinc_cache_path }}" >> $GITHUB_OUTPUT # expands variables
- name: Restore MiniZinc cache
id: cache-minizinc
uses: actions/cache@v3
with:
path: ${{ steps.get-mzn-cache-path.outputs.path }}
key: ${{ env.minizinc_url }}
- name: Download MiniZinc
if: steps.cache-minizinc.outputs.cache-hit != 'true'
run: |
curl -o "${{ env.minizinc_downloaded_filepath }}" -L ${{ env.minizinc_url }}
- name: Install MiniZinc
if: steps.cache-minizinc.outputs.cache-hit != 'true'
run: |
${{ env.minizinc_install_cmdline }}
- name: Test minizinc install
run: |
${{ env.minizinc_config_cmdline }}
minizinc --version
- name: Install scikit-decide
run: |
python_version=${{ matrix.python-version }}
wheelfile=$(ls ./wheels/scikit_decide*-cp${python_version/\./}-*manylinux*.whl)
pip install ${wheelfile}[all] pytest gymnasium[classic-control] docopt commonmark
- name: Test with pytest
run: |
# configure minizinc
${{ env.minizinc_config_cmdline }}
# test minizinc
python -c "import minizinc; print(minizinc.default_driver.minizinc_version); minizinc.Solver.lookup('gecode')"
# run pytest
          # we split the tests between
          # - the c++ scikit-decide library
          # - ortools (scheduling)
          # - deep-learning solvers (solvers/python)
          # - the others
          # to avoid openmp version conflicts
pytest -v -s tests/*/cpp
pytest -v -s tests/solvers/python
pytest -v -s tests/scheduling
pytest -v -s --ignore-glob tests/*/cpp --ignore tests/solvers/python --ignore tests/scheduling
- name: Test python block codes from guide
run: |
python scripts/md2py.py docs/guide/README.md tests/test_guide.py
python tests/test_guide.py
build-doc:
needs: [build-macos, build-ubuntu, build-windows, test-ubuntu, setup]
if: needs.setup.outputs.build_doc == 'true'
strategy:
matrix:
os: [ubuntu-latest]
python-version: ["3.10"]
fail-fast: false
runs-on: ${{ matrix.os }}
steps:
- name: Set env variables for github+binder links in doc
run: |
# binder environment repo and branch
AUTODOC_BINDER_ENV_GH_REPO_NAME=${GITHUB_REPOSITORY}
AUTODOC_BINDER_ENV_GH_BRANCH="binder"
# notebooks source repo and branch depending if it is a commit push or a PR
if [[ $GITHUB_REF == refs/pull* ]];
then
AUTODOC_NOTEBOOKS_REPO_URL="${GITHUB_SERVER_URL}/${{ github.event.pull_request.head.repo.full_name }}"
AUTODOC_NOTEBOOKS_BRANCH=${GITHUB_HEAD_REF}
elif [[ $GITHUB_REF == refs/heads* ]];
then
AUTODOC_NOTEBOOKS_REPO_URL=${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}
AUTODOC_NOTEBOOKS_BRANCH=${GITHUB_REF/refs\/heads\//}
fi
# export in GITHUB_ENV for next steps
echo "AUTODOC_BINDER_ENV_GH_REPO_NAME=${AUTODOC_BINDER_ENV_GH_REPO_NAME}" >> $GITHUB_ENV
echo "AUTODOC_BINDER_ENV_GH_BRANCH=${AUTODOC_BINDER_ENV_GH_BRANCH}" >> $GITHUB_ENV
echo "AUTODOC_NOTEBOOKS_REPO_URL=${AUTODOC_NOTEBOOKS_REPO_URL}" >> $GITHUB_ENV
echo "AUTODOC_NOTEBOOKS_BRANCH=${AUTODOC_NOTEBOOKS_BRANCH}" >> $GITHUB_ENV
# check computed variables
echo "Binder env: ${AUTODOC_BINDER_ENV_GH_REPO_NAME}/${AUTODOC_BINDER_ENV_GH_BRANCH}"
echo "Notebooks source: ${AUTODOC_NOTEBOOKS_REPO_URL}/tree/${AUTODOC_NOTEBOOKS_BRANCH}"
- uses: actions/checkout@v3
with:
submodules: true
- name: Setup python
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}
- name: Download artifacts
uses: actions/download-artifact@v3
with:
name: wheels
path: wheels
- name: Install scikit-decide
run: |
# find proper wheel and install it
python_version=${{ matrix.python-version }}
wheelfile=$(ls ./wheels/scikit_decide*-cp${python_version/\./}-*manylinux*.whl)
pip install ${wheelfile}[all]
- name: generate documentation
run: |
yarn global add vuepress && yarn install
export NODE_OPTIONS=--openssl-legacy-provider # avoid issue with node 18 and current dependencies (ok because no interaction with external network during the build)
export DO_SKIP_MZN_CHECK=1 # avoid having to install minizinc for discrete-optimization
yarn docs:build
touch docs/.vuepress/dist/.nojekyll
- name: upload as artifact
uses: actions/upload-artifact@v3
with:
name: doc
path: docs/.vuepress/dist
upload-doc:
needs: [build-doc, test-windows, test-macos, test-ubuntu]
if: github.ref == 'refs/heads/master'
strategy:
matrix:
os: [ubuntu-latest]
python-version: ["3.8"]
fail-fast: false
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
- name: Download artifacts
uses: actions/download-artifact@v3
with:
name: doc
path: docs/.vuepress/dist
- name: Deploy documentation in root folder on GH pages 🚀
uses: JamesIves/github-pages-deploy-action@v4
with:
branch: gh-pages # The branch the action should deploy to.
folder: docs/.vuepress/dist # The folder the action should deploy.
target-folder: / # The folder the action should deploy to.
commit-message: publish documentation
single-commit: true
clean-exclude: |
"version/*"
upload-nightly:
if: (github.ref == 'refs/heads/master') && (github.repository == 'airbus/scikit-decide') && (github.event_name == 'schedule')
needs: [test-ubuntu, test-macos, test-windows]
runs-on: ubuntu-latest
steps:
- uses: actions/download-artifact@v3
with:
name: wheels
path: dist/
- run: |
zip -r dist.zip dist/
- uses: actions/github-script@v6
id: asset
with:
script: |
const fs = require('fs');
// Get the ref for master
const master_sha = '${{ github.sha }}';
console.log(`master reference ${master_sha}`);
// Retrieve ref for tag `nightly`
let ref_nightly = null;
try {
ref_nightly = await github.rest.git.getRef({
owner: context.repo.owner,
repo: context.repo.repo,
ref: 'tags/nightly',
});
if (ref_nightly.data.object.sha === master_sha) {
return '';
}
} catch (err) {
// The tag does not exist so let's create it
ref_nightly = await github.rest.git.createRef({
owner: context.repo.owner,
repo: context.repo.repo,
ref: 'refs/tags/nightly',
sha: master_sha,
});
}
// Call the GitHub API to get a release by tag
let release = null;
try {
release = await github.rest.repos.getReleaseByTag({
owner: context.repo.owner,
repo: context.repo.repo,
tag: 'nightly',
});
console.log(`Found release ${release.data.tag_name} ${release.data.draft} ${release.data.prerelease}`);
} catch (err) {
console.log(`Release 'nightly' not found`);
// If the release doesn't exist, create it
release = await github.rest.repos.createRelease({
owner: context.repo.owner,
repo: context.repo.repo,
tag_name: 'nightly',
name: 'nightly',
body: 'Nightly release crafted with ♥️ somewhere on 🌎',
draft: false,
prerelease: true,
});
console.log(`Created release ${release.data.tag_name} ${release.data.draft} ${release.data.prerelease}`);
}
console.log(`Release does exist with tag ${release.data.tag_name} [${release.data.draft} ${release.data.prerelease}]`);
// At this stage both tag & release exist
// Update nightly tag
await github.rest.git.updateRef({
owner: context.repo.owner,
repo: context.repo.repo,
ref: 'tags/nightly',
sha: master_sha,
force: true,
});
            console.log(`Updated tag with sha ${master_sha}`);
// Update the release
await github.rest.repos.updateRelease({
owner: context.repo.owner,
repo: context.repo.repo,
release_id: release.data.id,
tag_name: 'nightly',
name: 'nightly',
body: 'Nightly release crafted with ♥️ somewhere on 🌎',
draft: false,
prerelease: true,
});
console.log(`Updated ${release.data.tag_name} nightly release ${release.data.draft} ${release.data.prerelease}`);
            // Get all tags and keep the newest one starting with 'v'
let newest_tag = { name: 'v0.0.0' };
const tags = await github.rest.repos.listTags({
owner: context.repo.owner,
repo: context.repo.repo,
});
// Keep latest tag
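            // numeric collation so that e.g. 'v0.10.0' sorts after 'v0.9.3'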
for (const tag of tags.data) {
if (tag.name.startsWith('v')) {
if (tag.name.localeCompare(newest_tag.name, undefined, { numeric: true}) > 0) {
newest_tag = tag;
}
}
}
console.log(`Previous release has tag ${newest_tag.name} → ${newest_tag.commit.sha}`);
// Count all commits between HEAD and newest tag
// Limited to 250 commits
const distance = await github.rest.repos.compareCommitsWithBasehead({
owner: context.repo.owner,
repo: context.repo.repo,
basehead: `${newest_tag.commit.sha}...${master_sha}`,
}).then(d => d.data.total_commits);
            // Rename the zip archive (built from dist/ above) to the release asset name
            let release_name = `nightly_${distance}_${master_sha.substring(0,8)}.zip`;
console.log(`Release file name: ${release_name}`);
fs.renameSync('dist.zip', release_name);
// Upload the zip file to GitHub
const uploadedAsset = await github.rest.repos.uploadReleaseAsset({
owner: context.repo.owner,
repo: context.repo.repo,
release_id: release.data.id,
name: release_name,
data: fs.readFileSync(release_name),
headers: {
'content-type': 'application/zip',
},
});
return uploadedAsset.data.browser_download_url;
result-encoding: string
- uses: actions/checkout@v3
        # only if a nightly release occurred
if: ${{ steps.asset.outputs.result != '' }}
with:
ref: binder
- name: Force a rebuild of binder environment
        # only if a nightly release occurred
if: ${{ steps.asset.outputs.result != '' }}
run: |
# update nightly_build_url to install last nightly build over last release
sed -i -e 's|nightly_build_url="[^"]*"|nightly_build_url="${{ steps.asset.outputs.result }}"|' postBuild
git config user.name "Actions"
git config user.email "[email protected]"
git commit postBuild -m "Use scikit-decide last nightly build"
git push origin binder
- uses: actions/checkout@v3 # checkout triggering branch to get scripts/trigger_binder.sh
        # only if a nightly release occurred
if: ${{ steps.asset.outputs.result != '' }}
- name: Trigger a build on each BinderHub deployments in the mybinder.org federation
        # only if a nightly release occurred
if: ${{ steps.asset.outputs.result != '' }}
run: |
bash scripts/trigger_binder.sh https://ovh.mybinder.org/build/gh/${GITHUB_REPOSITORY}/binder
bash scripts/trigger_binder.sh https://ovh2.mybinder.org/build/gh/${GITHUB_REPOSITORY}/binder
bash scripts/trigger_binder.sh https://notebooks.gesis.org/binder/build/gh/${GITHUB_REPOSITORY}/binder