diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 8f5c8c5..3af99ba 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -3,8 +3,22 @@ # These owners will be the default owners for everything in the # repo. Unless a later match takes precedence, these owners will be # requested for review when someone opens a pull request. -* @dav3r @felddy @jasonodoom @jsf9k @mcdonnnj +* @dav3r @felddy @jsf9k @mcdonnnj # These folks own any files in the .github directory at the root of # the repository and any of its subdirectories. -/.github/ @dav3r @felddy @jasonodoom @jsf9k @mcdonnnj +/.github/ @dav3r @felddy @jsf9k @mcdonnnj + +# These folks own all linting configuration files. +/.ansible-lint @dav3r @felddy @jsf9k @mcdonnnj +/.bandit.yml @dav3r @felddy @jsf9k @mcdonnnj +/.flake8 @dav3r @felddy @jsf9k @mcdonnnj +/.isort.cfg @dav3r @felddy @jsf9k @mcdonnnj +/.mdl_config.yaml @dav3r @felddy @jsf9k @mcdonnnj +/.pre-commit-config.yaml @dav3r @felddy @jsf9k @mcdonnnj +/.prettierignore @dav3r @felddy @jsf9k @mcdonnnj +/.yamllint @dav3r @felddy @jsf9k @mcdonnnj +/requirements.txt @dav3r @felddy @jsf9k @mcdonnnj +/requirements-dev.txt @dav3r @felddy @jsf9k @mcdonnnj +/requirements-test.txt @dav3r @felddy @jsf9k @mcdonnnj +/setup-env @dav3r @felddy @jsf9k @mcdonnnj diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 61116c3..4688810 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -18,18 +18,21 @@ updates: - dependency-name: actions/checkout - dependency-name: actions/setup-go - dependency-name: actions/setup-python + - dependency-name: cisagov/setup-env-github-action - dependency-name: crazy-max/ghaction-dump-context - dependency-name: crazy-max/ghaction-github-labeler - dependency-name: crazy-max/ghaction-github-status + - dependency-name: GitHubSecurityLab/actions-permissions + - dependency-name: hashicorp/setup-packer - dependency-name: hashicorp/setup-terraform - dependency-name: mxschmitt/action-tmate - dependency-name: step-security/harden-runner # Managed by cisagov/skeleton-docker - dependency-name: actions/download-artifact - - dependency-name: actions/github-script - dependency-name: actions/upload-artifact - dependency-name: docker/build-push-action - dependency-name: docker/login-action + - dependency-name: docker/metadata-action - dependency-name: docker/setup-buildx-action - dependency-name: docker/setup-qemu-action - dependency-name: github/codeql-action diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 6de39a9..08fd517 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -2,45 +2,65 @@ name: build on: + merge_group: + types: + - checks_requested + pull_request: push: branches: - "**" tags: - "v*.*.*" - pull_request: - schedule: - - cron: "0 10 * * *" # everyday at 10am repository_dispatch: # Respond to rebuild requests. See: https://github.com/cisagov/action-apb/ - types: [apb] + types: + - apb + schedule: + - cron: "0 10 * * *" # everyday at 10am workflow_dispatch: inputs: - remote-shell: - description: "Debug with remote shell" - required: true - default: "false" image-tag: + default: "dispatch" description: "Tag to apply to pushed images" required: true - default: "dispatch" + remote-shell: + default: "false" + description: "Debug with remote shell" + required: true + +# Set a default shell for any run steps. The `-Eueo pipefail` sets errtrace, +# nounset, errexit, and pipefail. The `-x` will print all commands as they are +# run. 
Please see the GitHub Actions documentation for more information: +# https://docs.github.com/en/actions/using-jobs/setting-default-values-for-jobs +defaults: + run: + shell: bash -Eueo pipefail -x {0} env: BUILDX_CACHE_DIR: ~/.cache/buildx - CURL_CACHE_DIR: ~/.cache/curl IMAGE_NAME: cisagov/guacscanner PIP_CACHE_DIR: ~/.cache/pip - PLATFORMS: "linux/amd64,linux/arm/v6,linux/arm/v7,\ - linux/arm64,linux/ppc64le,linux/s390x" PRE_COMMIT_CACHE_DIR: ~/.cache/pre-commit RUN_TMATE: ${{ secrets.RUN_TMATE }} + TERRAFORM_DOCS_REPO_BRANCH_NAME: improvement/support_atx_closed_markdown_headers + TERRAFORM_DOCS_REPO_DEPTH: 1 + TERRAFORM_DOCS_REPO_URL: https://github.com/mcdonnnj/terraform-docs.git jobs: diagnostics: name: Run diagnostics + # This job does not need any permissions + permissions: {} runs-on: ubuntu-latest steps: # Note that a duplicate of this step must be added at the top of # each job. + - uses: GitHubSecurityLab/actions-permissions/monitor@v1 + with: + # Uses the organization variable unless overridden + config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} + # Note that a duplicate of this step must be added at the top of + # each job. - id: harden-runner name: Harden the runner uses: step-security/harden-runner@v2 @@ -48,7 +68,7 @@ jobs: egress-policy: audit - id: github-status name: Check GitHub status - uses: crazy-max/ghaction-github-status@v3 + uses: crazy-max/ghaction-github-status@v4 - id: dump-context name: Dump context uses: crazy-max/ghaction-dump-context@v2 @@ -58,8 +78,15 @@ jobs: name: Lint sources needs: - diagnostics + permissions: + # actions/checkout needs this to fetch code + contents: read runs-on: ubuntu-latest steps: + - uses: GitHubSecurityLab/actions-permissions/monitor@v1 + with: + # Uses the organization variable unless overridden + config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} - id: harden-runner name: Harden the runner uses: step-security/harden-runner@v2 @@ -69,23 +96,23 @@ jobs: uses: cisagov/setup-env-github-action@develop - uses: actions/checkout@v4 - id: setup-python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: - python-version: "3.11" + python-version: ${{ steps.setup-env.outputs.python-version }} # We need the Go version and Go cache location for the actions/cache step, # so the Go installation must happen before that. - id: setup-go - uses: actions/setup-go@v4 + uses: actions/setup-go@v5 with: # There is no expectation for actual Go code so we disable caching as # it relies on the existence of a go.sum file. cache: false - go-version: "1.20" - - name: Lookup Go cache directory - id: go-cache + go-version: ${{ steps.setup-env.outputs.go-version }} + - id: go-cache + name: Lookup Go cache directory run: | echo "dir=$(go env GOCACHE)" >> $GITHUB_OUTPUT - - uses: actions/cache@v3 + - uses: actions/cache@v4 env: BASE_CACHE_KEY: "${{ github.job }}-${{ runner.os }}-\ py${{ steps.setup-python.outputs.python-version }}-\ @@ -93,6 +120,10 @@ jobs: packer${{ steps.setup-env.outputs.packer-version }}-\ tf${{ steps.setup-env.outputs.terraform-version }}-" with: + key: "${{ env.BASE_CACHE_KEY }}\ + ${{ hashFiles('**/requirements-test.txt') }}-\ + ${{ hashFiles('**/requirements.txt') }}-\ + ${{ hashFiles('**/.pre-commit-config.yaml') }}" # Note that the .terraform directory IS NOT included in the # cache because if we were caching, then we would need to use # the `-upgrade=true` option. 
This option blindly pulls down the @@ -102,30 +133,13 @@ jobs: path: | ${{ env.PIP_CACHE_DIR }} ${{ env.PRE_COMMIT_CACHE_DIR }} - ${{ env.CURL_CACHE_DIR }} ${{ steps.go-cache.outputs.dir }} - key: "${{ env.BASE_CACHE_KEY }}\ - ${{ hashFiles('**/requirements-test.txt') }}-\ - ${{ hashFiles('**/requirements.txt') }}-\ - ${{ hashFiles('**/.pre-commit-config.yaml') }}" restore-keys: | ${{ env.BASE_CACHE_KEY }} - - name: Setup curl cache - run: mkdir -p ${{ env.CURL_CACHE_DIR }} - - name: Install Packer - env: - PACKER_VERSION: ${{ steps.setup-env.outputs.packer-version }} - run: | - PACKER_ZIP="packer_${PACKER_VERSION}_linux_amd64.zip" - curl --output ${{ env.CURL_CACHE_DIR }}/"${PACKER_ZIP}" \ - --time-cond ${{ env.CURL_CACHE_DIR }}/"${PACKER_ZIP}" \ - --location \ - "https://releases.hashicorp.com/packer/${PACKER_VERSION}/${PACKER_ZIP}" - sudo unzip -d /opt/packer \ - ${{ env.CURL_CACHE_DIR }}/"${PACKER_ZIP}" - sudo mv /usr/local/bin/packer /usr/local/bin/packer-default - sudo ln -s /opt/packer/packer /usr/local/bin/packer - - uses: hashicorp/setup-terraform@v2 + - uses: hashicorp/setup-packer@v3 + with: + version: ${{ steps.setup-env.outputs.packer-version }} + - uses: hashicorp/setup-terraform@v3 with: terraform_version: ${{ steps.setup-env.outputs.terraform-version }} - name: Install go-critic @@ -133,26 +147,38 @@ jobs: PACKAGE_URL: github.com/go-critic/go-critic/cmd/gocritic PACKAGE_VERSION: ${{ steps.setup-env.outputs.go-critic-version }} run: go install ${PACKAGE_URL}@${PACKAGE_VERSION} + - name: Install goimports + env: + PACKAGE_URL: golang.org/x/tools/cmd/goimports + PACKAGE_VERSION: ${{ steps.setup-env.outputs.goimports-version }} + run: go install ${PACKAGE_URL}@${PACKAGE_VERSION} - name: Install gosec env: PACKAGE_URL: github.com/securego/gosec/v2/cmd/gosec PACKAGE_VERSION: ${{ steps.setup-env.outputs.gosec-version }} run: go install ${PACKAGE_URL}@${PACKAGE_VERSION} - - name: Install shfmt - env: - PACKAGE_URL: mvdan.cc/sh/v3/cmd/shfmt - PACKAGE_VERSION: ${{ steps.setup-env.outputs.shfmt-version }} - run: go install ${PACKAGE_URL}@${PACKAGE_VERSION} - name: Install staticcheck env: PACKAGE_URL: honnef.co/go/tools/cmd/staticcheck PACKAGE_VERSION: ${{ steps.setup-env.outputs.staticcheck-version }} run: go install ${PACKAGE_URL}@${PACKAGE_VERSION} - - name: Install Terraform-docs - env: - PACKAGE_URL: github.com/terraform-docs/terraform-docs - PACKAGE_VERSION: ${{ steps.setup-env.outputs.terraform-docs-version }} - run: go install ${PACKAGE_URL}@${PACKAGE_VERSION} + # TODO: https://github.com/cisagov/skeleton-generic/issues/165 + # We are temporarily using @mcdonnnj's forked branch of terraform-docs + # until his PR: https://github.com/terraform-docs/terraform-docs/pull/745 + # is approved. This temporary fix will allow for ATX header support when + # terraform-docs is run during linting. + - name: Clone ATX headers branch from terraform-docs fork + run: | + git clone \ + --branch $TERRAFORM_DOCS_REPO_BRANCH_NAME \ + --depth $TERRAFORM_DOCS_REPO_DEPTH \ + --single-branch \ + $TERRAFORM_DOCS_REPO_URL /tmp/terraform-docs + - name: Build and install terraform-docs binary + run: | + go build \ + -C /tmp/terraform-docs \ + -o $(go env GOPATH)/bin/terraform-docs - name: Install dependencies run: | python -m pip install --upgrade pip setuptools wheel @@ -165,109 +191,46 @@ jobs: uses: mxschmitt/action-tmate@v3 if: env.RUN_TMATE prepare: - # Calculates and publishes outputs that are used by other jobs. - # - # Outputs: - # created: - # The current date-time in RFC3339 format. 
- # repometa: - # The json metadata describing this repository. - # source_version: - # The source version as reported by the `bump_version.sh show` command. - # tags: - # A comma separated list of Docker tags to be applied to the images on - # Docker Hub. The tags will vary depending on: - # - The event that triggered the build. - # - The branch the build is based upon. - # - The git tag the build is based upon. - # - # When a build is based on a git tag of the form `v*.*.*` the image will - # be tagged on Docker Hub with multiple levels of version specificity. - # For example, a git tag of `v1.2.3+a` will generate Docker tags of - # `:1.2.3_a`, `:1.2.3`, `:1.2`, `:1`, and `:latest`. - # - # Builds targeting the default branch will be tagged with `:edge`. - # - # Builds from other branches will be tagged with the branch name. Solidi - # (`/` characters - commonly known as slashes) in branch names are - # replaced with hyphen-minuses (`-` characters) in the Docker tag. For - # more information about the solidus see these links: - # * https://www.compart.com/en/unicode/U+002F - # * https://en.wikipedia.org/wiki/Slash_(punctuation)#Encoding - # - # Builds triggered by a push event are tagged with a short hash in the - # form: sha-12345678 - # - # Builds triggered by a pull request are tagged with the pull request - # number in the form pr-123. - # - # Builds triggered using the GitHub GUI (workflow_dispatch) are tagged - # with the value specified by the user. - # - # Scheduled builds are tagged with `:nightly`. + # Generate Docker image metadata using the docker/metadata-action GitHub Action. name: Prepare build variables needs: - diagnostics outputs: - created: ${{ steps.prep.outputs.created }} - repometa: ${{ steps.repo.outputs.result }} - source_version: ${{ steps.prep.outputs.source_version }} - tags: ${{ steps.prep.outputs.tags }} + labels: ${{ steps.generate-metadata.outputs.labels }} + tags: ${{ steps.generate-metadata.outputs.tags }} + permissions: + # actions/checkout needs this to fetch code + contents: read runs-on: ubuntu-latest steps: + - uses: GitHubSecurityLab/actions-permissions/monitor@v1 + with: + # Uses the organization variable unless overridden + config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} - id: harden-runner name: Harden the runner uses: step-security/harden-runner@v2 with: egress-policy: audit - uses: actions/checkout@v4 - - name: Gather repository metadata - id: repo - uses: actions/github-script@v7 + - id: generate-metadata + name: Generate Docker image metadata + uses: docker/metadata-action@v5 with: - script: | - const repo = await github.rest.repos.get(context.repo) - return repo.data - - name: Calculate output values - id: prep - run: | - VERSION=noop - SEMVER="^v(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)\.(0|[1-9][0-9]*)(-((0|[1-9][0-9]*|[0-9]*[a-zA-Z-][0-9a-zA-Z-]*)(\.(0|[1-9][0-9]*|[0-9]*[a-zA-Z-][0-9a-zA-Z-]*))*))?(\+([0-9a-zA-Z-]+(\.[0-9a-zA-Z-]+)*))?$" - if [ "${{ github.event_name }}" = "schedule" ]; then - VERSION=nightly - elif [ "${{ github.event_name }}" = "workflow_dispatch" ]; then - VERSION=${{ github.event.inputs.image-tag }} - elif [[ $GITHUB_REF == refs/tags/* ]]; then - VERSION=${GITHUB_REF#refs/tags/} - elif [[ $GITHUB_REF == refs/heads/* ]]; then - VERSION=$(echo ${GITHUB_REF#refs/heads/} | sed -r 's#/+#-#g') - if [ "${{ github.event.repository.default_branch }}" = "$VERSION" ]; - then - VERSION=edge - fi - elif [[ $GITHUB_REF == refs/pull/* ]]; then - VERSION=pr-${{ github.event.number }} - fi - if [[ $VERSION =~ $SEMVER ]]; then - 
VERSION_NO_V=${VERSION#v} - MAJOR="${BASH_REMATCH[1]}" - MINOR="${BASH_REMATCH[2]}" - PATCH="${BASH_REMATCH[3]}" - TAGS="${IMAGE_NAME}:${VERSION_NO_V//+/_},${IMAGE_NAME}:${MAJOR}.${MINOR}.${PATCH},${IMAGE_NAME}:${MAJOR}.${MINOR},${IMAGE_NAME}:${MAJOR},${IMAGE_NAME}:latest" - else - TAGS="${IMAGE_NAME}:${VERSION}" - fi - if [ "${{ github.event_name }}" = "push" ]; then - TAGS="${TAGS},${IMAGE_NAME}:sha-${GITHUB_SHA::8}" - fi - for i in ${TAGS//,/ } - do - TAGS="${TAGS},ghcr.io/${i}" - done - echo "created=$(date -u +'%Y-%m-%dT%H:%M:%SZ')" >> $GITHUB_OUTPUT - echo "source_version=$(./bump_version.sh show)" >> $GITHUB_OUTPUT - echo "tags=${TAGS}" >> $GITHUB_OUTPUT - echo tags=${TAGS} + images: | + ${{ env.IMAGE_NAME }} + ghcr.io/${{ env.IMAGE_NAME }} + tags: | + type=edge + type=raw,event=workflow_dispatch,value=${{ github.event.inputs.image-tag }} + type=ref,event=branch + type=ref,event=pr + type=ref,event=tag + type=schedule + type=semver,pattern={{major}} + type=semver,pattern={{major}}.{{minor}} + type=semver,pattern={{version}} + type=sha - name: Setup tmate debug session uses: mxschmitt/action-tmate@v3 if: github.event.inputs.remote-shell == 'true' || env.RUN_TMATE @@ -278,8 +241,15 @@ jobs: needs: - diagnostics - prepare + permissions: + # actions/checkout needs this to fetch code + contents: read runs-on: ubuntu-latest steps: + - uses: GitHubSecurityLab/actions-permissions/monitor@v1 + with: + # Uses the organization variable unless overridden + config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} - id: harden-runner name: Harden the runner uses: step-security/harden-runner@v2 @@ -292,26 +262,25 @@ jobs: - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 - name: Cache Docker layers - uses: actions/cache@v3 + uses: actions/cache@v4 env: BASE_CACHE_KEY: buildx-${{ runner.os }}- with: - path: ${{ env.BUILDX_CACHE_DIR }} key: ${{ env.BASE_CACHE_KEY }}${{ github.sha }} + path: ${{ env.BUILDX_CACHE_DIR }} restore-keys: | ${{ env.BASE_CACHE_KEY }} - name: Create dist directory run: mkdir -p dist - name: Build image id: docker_build - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: - build-args: | - VERSION=${{ needs.prepare.outputs.source_version }} cache-from: type=local,src=${{ env.BUILDX_CACHE_DIR }} cache-to: type=local,dest=${{ env.BUILDX_CACHE_DIR }} context: . file: ./Dockerfile + labels: ${{ needs.prepare.outputs.labels }} outputs: type=docker,dest=dist/image.tar # Uncomment the following option if you are building an image for use # on Google Cloud Run or AWS Lambda. 
The current default image output @@ -321,33 +290,10 @@ jobs: tags: ${{ env.IMAGE_NAME }}:latest # not to be pushed # For a list of pre-defined annotation keys and value types see: # https://github.com/opencontainers/image-spec/blob/master/annotations.md - labels: "\ - org.opencontainers.image.created=${{ - needs.prepare.outputs.created }} - - org.opencontainers.image.description=${{ - fromJson(needs.prepare.outputs.repometa).description }} - - org.opencontainers.image.licenses=${{ - fromJson(needs.prepare.outputs.repometa).license.spdx_id }} - - org.opencontainers.image.revision=${{ github.sha }} - - org.opencontainers.image.source=${{ - fromJson(needs.prepare.outputs.repometa).clone_url }} - - org.opencontainers.image.title=${{ - fromJson(needs.prepare.outputs.repometa).name }} - - org.opencontainers.image.url=${{ - fromJson(needs.prepare.outputs.repometa).html_url }} - - org.opencontainers.image.version=${{ - needs.prepare.outputs.source_version }}" - name: Compress image run: gzip dist/image.tar - name: Upload artifacts - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: dist path: dist @@ -360,28 +306,37 @@ jobs: needs: - diagnostics - build + permissions: + # actions/checkout needs this to fetch code + contents: read runs-on: ubuntu-latest steps: + - uses: GitHubSecurityLab/actions-permissions/monitor@v1 + with: + # Uses the organization variable unless overridden + config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} - id: harden-runner name: Harden the runner uses: step-security/harden-runner@v2 with: egress-policy: audit + - id: setup-env + uses: cisagov/setup-env-github-action@develop - uses: actions/checkout@v4 - id: setup-python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: - python-version: "3.11" + python-version: ${{ steps.setup-env.outputs.python-version }} - name: Cache testing environments - uses: actions/cache@v3 + uses: actions/cache@v4 env: BASE_CACHE_KEY: "${{ github.job }}-${{ runner.os }}-\ py${{ steps.setup-python.outputs.python-version }}-" with: - path: ${{ env.PIP_CACHE_DIR }} key: "${{ env.BASE_CACHE_KEY }}\ ${{ hashFiles('**/requirements-test.txt') }}-\ ${{ hashFiles('**/requirements.txt') }}" + path: ${{ env.PIP_CACHE_DIR }} restore-keys: | ${{ env.BASE_CACHE_KEY }} - name: Install dependencies @@ -389,7 +344,7 @@ jobs: python -m pip install --upgrade pip setuptools wheel pip install --upgrade --requirement requirements-test.txt - name: Download docker image artifact - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: dist path: dist @@ -403,12 +358,12 @@ jobs: uses: mxschmitt/action-tmate@v3 if: env.RUN_TMATE build-push-all: - # Builds the final set of images for each of the platforms listed in - # PLATFORMS environment variable. These images are tagged with the Docker - # tags calculated in the "prepare" job and pushed to Docker Hub and the - # GitHub Container Registry. The contents of README.md are pushed as the - # image's description to Docker Hub. This job is skipped when the - # triggering event is a pull request. + # Builds the final set of images for each of the platforms specified in the + # "platforms" input for the docker/build-push-action Action. These images + # are tagged with the Docker tags calculated in the "prepare" job and + # pushed to Docker Hub and the GitHub Container Registry. The contents of + # README.md are pushed as the image's description to Docker Hub. This job + # is skipped when the triggering event is a pull request. 
if: github.event_name != 'pull_request' name: Build and push all platforms needs: @@ -416,12 +371,18 @@ jobs: - lint - prepare - test - # When Dependabot creates a PR it requires this permission in - # order to push Docker images to ghcr.io. permissions: + # actions/checkout needs this to fetch code + contents: read + # When Dependabot creates a PR it requires this permission in + # order to push Docker images to ghcr.io. packages: write runs-on: ubuntu-latest steps: + - uses: GitHubSecurityLab/actions-permissions/monitor@v1 + with: + # Uses the organization variable unless overridden + config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} - id: harden-runner name: Harden the runner uses: step-security/harden-runner@v2 @@ -430,14 +391,14 @@ jobs: - name: Login to Docker Hub uses: docker/login-action@v3 with: - username: ${{ secrets.DOCKER_USERNAME }} password: ${{ secrets.DOCKER_PASSWORD }} + username: ${{ secrets.DOCKER_USERNAME }} - name: Login to GitHub Container Registry uses: docker/login-action@v3 with: + password: ${{ secrets.GITHUB_TOKEN }} registry: ghcr.io username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} - name: Checkout uses: actions/checkout@v4 - name: Set up QEMU @@ -445,27 +406,30 @@ jobs: - name: Set up Docker Buildx uses: docker/setup-buildx-action@v3 - name: Cache Docker layers - uses: actions/cache@v3 + uses: actions/cache@v4 env: BASE_CACHE_KEY: buildx-${{ runner.os }}- with: - path: ${{ env.BUILDX_CACHE_DIR }} key: ${{ env.BASE_CACHE_KEY }}${{ github.sha }} + path: ${{ env.BUILDX_CACHE_DIR }} restore-keys: | ${{ env.BASE_CACHE_KEY }} - - name: Create cross-platform support Dockerfile-x - run: ./buildx-dockerfile.sh - name: Build and push platform images to registries id: docker_build - uses: docker/build-push-action@v5 + uses: docker/build-push-action@v6 with: - build-args: | - VERSION=${{ needs.prepare.outputs.source_version }} cache-from: type=local,src=${{ env.BUILDX_CACHE_DIR }} cache-to: type=local,dest=${{ env.BUILDX_CACHE_DIR }} context: . - file: ./Dockerfile-x - platforms: ${{ env.PLATFORMS }} + file: ./Dockerfile + labels: ${{ needs.prepare.outputs.labels }} + platforms: | + linux/amd64 + linux/arm/v6 + linux/arm/v7 + linux/arm64 + linux/ppc64le + linux/s390x # Uncomment the following option if you are building an image for use # on Google Cloud Run or AWS Lambda. The current default image output # is unable to run on either. 
Please see the following issue for more @@ -475,34 +439,15 @@ jobs: tags: ${{ needs.prepare.outputs.tags }} # For a list of pre-defined annotation keys and value types see: # https://github.com/opencontainers/image-spec/blob/master/annotations.md - labels: "\ - org.opencontainers.image.created=${{ - needs.prepare.outputs.created }} - - org.opencontainers.image.description=${{ - fromJson(needs.prepare.outputs.repometa).description }} - - org.opencontainers.image.licenses=${{ - fromJson(needs.prepare.outputs.repometa).license.spdx_id }} - - org.opencontainers.image.revision=${{ github.sha }} - - org.opencontainers.image.source=${{ - fromJson(needs.prepare.outputs.repometa).clone_url }} - - org.opencontainers.image.title=${{ - fromJson(needs.prepare.outputs.repometa).name }} - - org.opencontainers.image.url=${{ - fromJson(needs.prepare.outputs.repometa).html_url }} - - org.opencontainers.image.version=${{ - needs.prepare.outputs.source_version }}" - - name: Publish README.md to Docker Hub - env: - DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }} - DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }} - run: ./push_readme.sh + - if: ${{ github.ref_name == 'develop' && github.event_name == 'push' }} + name: Update the Docker Hub description + uses: peter-evans/dockerhub-description@v4 + with: + password: ${{ secrets.DOCKER_PASSWORD }} + readme-filepath: README.md + repository: ${{ env.IMAGE_NAME }} + short-description: ${{ github.event.repository.description }} + username: ${{ secrets.DOCKER_USERNAME }} - name: Setup tmate debug session uses: mxschmitt/action-tmate@v3 if: env.RUN_TMATE diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index dc49271..642adda 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -7,25 +7,36 @@ name: CodeQL on: + merge_group: + types: + - checks_requested + pull_request: + # The branches here must be a subset of the ones in the push key + branches: + - develop push: # Dependabot triggered push events have read-only access, but uploading code # scanning requires write access. branches-ignore: - dependabot/** - pull_request: - # The branches below must be a subset of the branches above - branches: - - develop schedule: - cron: '0 21 * * 6' jobs: diagnostics: name: Run diagnostics + # This job does not need any permissions + permissions: {} runs-on: ubuntu-latest steps: # Note that a duplicate of this step must be added at the top of # each job. + - uses: GitHubSecurityLab/actions-permissions/monitor@v1 + with: + # Uses the organization variable unless overridden + config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} + # Note that a duplicate of this step must be added at the top of + # each job. 
- id: harden-runner name: Harden the runner uses: step-security/harden-runner@v2 @@ -33,7 +44,7 @@ jobs: egress-policy: audit - id: github-status name: Check GitHub status - uses: crazy-max/ghaction-github-status@v3 + uses: crazy-max/ghaction-github-status@v4 - id: dump-context name: Dump context uses: crazy-max/ghaction-dump-context@v2 @@ -43,6 +54,8 @@ jobs: - diagnostics runs-on: ubuntu-latest permissions: + # actions/checkout needs this to fetch code + contents: read # required for all workflows security-events: write strategy: @@ -56,6 +69,10 @@ jobs: # https://docs.github.com/en/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#overriding-automatic-language-detection steps: + - uses: GitHubSecurityLab/actions-permissions/monitor@v1 + with: + # Uses the organization variable unless overridden + config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} - id: harden-runner name: Harden the runner uses: step-security/harden-runner@v2 @@ -67,7 +84,7 @@ jobs: # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} @@ -75,7 +92,7 @@ jobs: # Java). If this step fails, then you should remove it and run the build # manually (see below). - name: Autobuild - uses: github/codeql-action/autobuild@v2 + uses: github/codeql-action/autobuild@v3 # ℹī¸ Command-line programs to run using the OS shell. # 📚 https://git.io/JvXDl @@ -89,4 +106,4 @@ jobs: # make release - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@v3 diff --git a/.github/workflows/sync-labels.yml b/.github/workflows/sync-labels.yml index 44e8e19..0005147 100644 --- a/.github/workflows/sync-labels.yml +++ b/.github/workflows/sync-labels.yml @@ -4,14 +4,42 @@ name: sync-labels on: push: paths: - - '.github/labels.yml' - - '.github/workflows/sync-labels.yml' + - .github/labels.yml + - .github/workflows/sync-labels.yml + workflow_dispatch: permissions: contents: read jobs: + diagnostics: + name: Run diagnostics + # This job does not need any permissions + permissions: {} + runs-on: ubuntu-latest + steps: + # Note that a duplicate of this step must be added at the top of + # each job. + - uses: GitHubSecurityLab/actions-permissions/monitor@v1 + with: + # Uses the organization variable unless overridden + config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} + # Note that a duplicate of this step must be added at the top of + # each job. 
+ - id: harden-runner + name: Harden the runner + uses: step-security/harden-runner@v2 + with: + egress-policy: audit + - id: github-status + name: Check GitHub status + uses: crazy-max/ghaction-github-status@v4 + - id: dump-context + name: Dump context + uses: crazy-max/ghaction-dump-context@v2 labeler: + needs: + - diagnostics permissions: # actions/checkout needs this to fetch code contents: read @@ -19,6 +47,15 @@ jobs: issues: write runs-on: ubuntu-latest steps: + - uses: GitHubSecurityLab/actions-permissions/monitor@v1 + with: + # Uses the organization variable unless overridden + config: ${{ vars.ACTIONS_PERMISSIONS_CONFIG }} + - id: harden-runner + name: Harden the runner + uses: step-security/harden-runner@v2 + with: + egress-policy: audit - uses: actions/checkout@v4 - name: Sync repository labels if: success() diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 79894a8..4d0c4a9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -4,22 +4,30 @@ default_language_version: python: python3 repos: + # Check the pre-commit configuration + - repo: meta + hooks: + - id: check-useless-excludes + - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 + rev: v5.0.0 hooks: - id: check-case-conflict - id: check-executables-have-shebangs - id: check-json - id: check-merge-conflict + - id: check-shebang-scripts-are-executable + - id: check-symlinks - id: check-toml + - id: check-vcs-permalinks - id: check-xml - id: debug-statements + - id: destroyed-symlinks - id: detect-aws-credentials args: - --allow-missing-credentials - id: detect-private-key - id: end-of-file-fixer - exclude: files/(issue|motd) - id: mixed-line-ending args: - --fix=lf @@ -31,17 +39,17 @@ repos: # Text file hooks - repo: https://github.com/igorshubovych/markdownlint-cli - rev: v0.36.0 + rev: v0.42.0 hooks: - id: markdownlint args: - --config=.mdl_config.yaml - - repo: https://github.com/pre-commit/mirrors-prettier - rev: v3.0.3 + - repo: https://github.com/rbubley/mirrors-prettier + rev: v3.3.3 hooks: - id: prettier - repo: https://github.com/adrienverge/yamllint - rev: v1.32.0 + rev: v1.35.1 hooks: - id: yamllint args: @@ -49,14 +57,14 @@ repos: # GitHub Actions hooks - repo: https://github.com/python-jsonschema/check-jsonschema - rev: 0.26.3 + rev: 0.29.4 hooks: - id: check-github-actions - id: check-github-workflows # pre-commit hooks - repo: https://github.com/pre-commit/pre-commit - rev: v3.4.0 + rev: v4.0.1 hooks: - id: validate_manifest @@ -64,21 +72,25 @@ repos: - repo: https://github.com/TekWizely/pre-commit-golang rev: v1.0.0-rc.1 hooks: - # Style Checkers - - id: go-critic - # StaticCheck - - id: go-staticcheck-repo-mod # Go Build - id: go-build-repo-mod + # Style Checkers + - id: go-critic + # goimports + - id: go-imports-repo + args: + # Write changes to files + - -w # Go Mod Tidy - id: go-mod-tidy-repo + # GoSec + - id: go-sec-repo-mod + # StaticCheck + - id: go-staticcheck-repo-mod # Go Test - id: go-test-repo-mod # Go Vet - id: go-vet-repo-mod - # GoSec - - id: go-sec-repo-mod - # Nix hooks - repo: https://github.com/nix-community/nixpkgs-fmt rev: v1.3.0 @@ -86,29 +98,33 @@ repos: - id: nixpkgs-fmt # Shell script hooks - - repo: https://github.com/cisagov/pre-commit-shfmt - rev: v0.0.2 + - repo: https://github.com/scop/pre-commit-shfmt + rev: v3.10.0-1 hooks: - id: shfmt args: + # List files that will be formatted + - --list + # Write result to file instead of stdout + - --write # Indent by two spaces - - -i - - '2' + - --indent + - "2" # Binary operators may 
start a line - - -bn + - --binary-next-line # Switch cases are indented - - -ci + - --case-indent # Redirect operators are followed by a space - - -sr - - repo: https://github.com/detailyang/pre-commit-shell - rev: 1.0.5 + - --space-redirects + - repo: https://github.com/shellcheck-py/shellcheck-py + rev: v0.10.0.1 hooks: - - id: shell-lint + - id: shellcheck # Python hooks # Run bandit on the "tests" tree with a configuration - repo: https://github.com/PyCQA/bandit - rev: 1.7.5 + rev: 1.7.10 hooks: - id: bandit name: bandit (tests tree) @@ -117,44 +133,93 @@ repos: - --config=.bandit.yml # Run bandit on everything except the "tests" tree - repo: https://github.com/PyCQA/bandit - rev: 1.7.5 + rev: 1.7.10 hooks: - id: bandit name: bandit (everything else) exclude: tests - repo: https://github.com/psf/black-pre-commit-mirror - rev: 23.9.1 + rev: 24.10.0 hooks: - id: black - repo: https://github.com/PyCQA/flake8 - rev: 6.1.0 + rev: 7.1.1 hooks: - id: flake8 additional_dependencies: - - flake8-docstrings + - flake8-docstrings==1.7.0 - repo: https://github.com/PyCQA/isort - rev: 5.12.0 + rev: 5.13.2 hooks: - id: isort - repo: https://github.com/pre-commit/mirrors-mypy - rev: v1.5.1 + rev: v1.13.0 hooks: - id: mypy + - repo: https://github.com/pypa/pip-audit + rev: v2.7.3 + hooks: + - id: pip-audit + args: + # Add any pip requirements files to scan + - --requirement + - requirements-dev.txt + - --requirement + - requirements-test.txt + - --requirement + - requirements.txt - repo: https://github.com/asottile/pyupgrade - rev: v3.10.1 + rev: v3.19.0 hooks: - id: pyupgrade # Ansible hooks - repo: https://github.com/ansible/ansible-lint - rev: v6.19.0 + rev: v24.10.0 hooks: - id: ansible-lint - # files: molecule/default/playbook.yml + additional_dependencies: + # On its own ansible-lint does not pull in ansible, only + # ansible-core. Therefore, if an Ansible module lives in + # ansible instead of ansible-core, the linter will complain + # that the module is unknown. In these cases it is + # necessary to add the ansible package itself as an + # additional dependency, with the same pinning as is done in + # requirements-test.txt of cisagov/skeleton-ansible-role. + # + # Version 10 is required because the pip-audit pre-commit + # hook identifies a vulnerability in ansible-core 2.16.13, + # but all versions of ansible 9 have a dependency on + # ~=2.16.X. + # + # It is also a good idea to go ahead and upgrade to version + # 10 since version 9 is going EOL at the end of November: + # https://endoflife.date/ansible + # - ansible>=10,<11 + # ansible-core 2.16.3 through 2.16.6 suffer from the bug + # discussed in ansible/ansible#82702, which breaks any + # symlinked files in vars, tasks, etc. for any Ansible role + # installed via ansible-galaxy. Hence we never want to + # install those versions. + # + # Note that the pip-audit pre-commit hook identifies a + # vulnerability in ansible-core 2.16.13. The pin of + # ansible-core to >=2.17 effectively also pins ansible to + # >=10. + # + # It is also a good idea to go ahead and upgrade to + # ansible-core 2.17 since security support for ansible-core + # 2.16 ends this month: + # https://docs.ansible.com/ansible/devel/reference_appendices/release_and_maintenance.html#ansible-core-support-matrix + # + # Note that any changes made to this dependency must also be + # made in requirements.txt in cisagov/skeleton-packer and + # requirements-test.txt in cisagov/skeleton-ansible-role. 
+ - ansible-core>=2.17 # Terraform hooks - repo: https://github.com/antonbabenko/pre-commit-terraform - rev: v1.83.2 + rev: v1.96.1 hooks: - id: terraform_fmt - id: terraform_validate @@ -167,7 +232,7 @@ repos: # Packer hooks - repo: https://github.com/cisagov/pre-commit-packer - rev: v0.0.2 + rev: v0.3.0 hooks: - - id: packer_validate - id: packer_fmt + - id: packer_validate diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 38b31fd..29add82 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -46,9 +46,13 @@ There are a few ways to do this, but we prefer to use create and manage a Python virtual environment specific to this project. -If you already have `pyenv` and `pyenv-virtualenv` configured you can -take advantage of the `setup-env` tool in this repo to automate the -entire environment configuration process. +We recommend using the `setup-env` script located in this repository, +as it automates the entire environment configuration process. The +dependencies required to run this script are +[GNU `getopt`](https://github.com/util-linux/util-linux/blob/master/misc-utils/getopt.1.adoc), +[`pyenv`](https://github.com/pyenv/pyenv), and [`pyenv-virtualenv`](https://github.com/pyenv/pyenv-virtualenv). +If these tools are already configured on your system, you can simply run the +following command: ```console ./setup-env @@ -57,13 +61,18 @@ entire environment configuration process. Otherwise, follow the steps below to manually configure your environment. -#### Installing and using `pyenv` and `pyenv-virtualenv` #### +#### Installing and using GNU `getopt`, `pyenv`, and `pyenv-virtualenv` #### -On the Mac, we recommend installing [brew](https://brew.sh/). Then -installation is as simple as `brew install pyenv pyenv-virtualenv` and +On macOS, we recommend installing [brew](https://brew.sh/). Then +installation is as simple as `brew install gnu-getopt pyenv pyenv-virtualenv` and adding this to your profile: ```bash +# GNU getopt must be explicitly added to the path since it is +# keg-only (https://docs.brew.sh/FAQ#what-does-keg-only-mean) +export PATH="$(brew --prefix)/opt/gnu-getopt/bin:$PATH" + +# Setup pyenv export PYENV_ROOT="$HOME/.pyenv" export PATH="$PYENV_ROOT/bin:$PATH" eval "$(pyenv init --path)" @@ -71,13 +80,15 @@ eval "$(pyenv init -)" eval "$(pyenv virtualenv-init -)" ``` -For Linux, Windows Subsystem for Linux (WSL), or on the Mac (if you +For Linux, Windows Subsystem for Linux (WSL), or macOS (if you don't want to use `brew`) you can use [pyenv/pyenv-installer](https://github.com/pyenv/pyenv-installer) to install the necessary tools. Before running this ensure that you have installed the prerequisites for your platform according to the [`pyenv` wiki page](https://github.com/pyenv/pyenv/wiki/common-build-problems). +GNU `getopt` is included in most Linux distributions as part of the +[`util-linux`](https://github.com/util-linux/util-linux) package. On WSL you should treat your platform as whatever Linux distribution you've chosen to install. diff --git a/Dockerfile b/Dockerfile index b6ba932..facec56 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,39 +1,19 @@ -ARG VERSION=unspecified -ARG PY_VERSION=3.10.7 - -FROM python:${PY_VERSION}-slim-bullseye AS compile-stage - -ARG VERSION +# Official Docker images are in the form library/ while non-official +# images are in the form /. 
+FROM docker.io/library/python:3.10.16-slim-bookworm AS compile-stage ### -# For a list of pre-defined annotation keys and value types see: -# https://github.com/opencontainers/image-spec/blob/master/annotations.md -# -# Note: Additional labels are added by the build workflow. +# Unprivileged user variables ### -LABEL org.opencontainers.image.authors="vm-fusion-dev-group@trio.dhs.gov" -LABEL org.opencontainers.image.vendor="Cybersecurity and Infrastructure Security Agency" - -### -# Unprivileged user setup variables -### -ARG CISA_UID=421 -ARG CISA_GID=${CISA_UID} ARG CISA_USER="cisa" -ENV CISA_GROUP=${CISA_USER} ENV CISA_HOME="/home/${CISA_USER}" +ENV VIRTUAL_ENV="${CISA_HOME}/.venv" -### -# Upgrade the system -### -RUN apt-get update --quiet --quiet \ - && apt-get upgrade --quiet --quiet - -### -# Create unprivileged user -### -RUN groupadd --system --gid ${CISA_GID} ${CISA_GROUP} \ - && useradd --system --uid ${CISA_UID} --gid ${CISA_GROUP} --comment "${CISA_USER} user" ${CISA_USER} +# Versions of the Python packages installed directly +ENV PYTHON_PIP_VERSION=25.0.1 +ENV PYTHON_PIPENV_VERSION=2024.4.1 +ENV PYTHON_SETUPTOOLS_VERSION=75.8.0 +ENV PYTHON_WHEEL_VERSION=0.45.1 ### # Install everything we need @@ -41,66 +21,49 @@ RUN groupadd --system --gid ${CISA_GID} ${CISA_GROUP} \ # Install dependencies are only needed for software installation and # will not be included in the final Docker image. ### -ENV DEPS \ - libpq-dev=13.13-0+deb11u1 -# I'd like to pin the version of wget to keep the build reproducible, -# but it's tricky. -# -# I need to use version 1.21-1+b1 of wget for amd64 and version 1.21-1 -# of wget otherwise. -# https://packages.debian.org/bullseye/wget -# -# I presume the solution is to somehow make use of this jazz: -# https://docs.docker.com/engine/reference/builder/#automatic-platform-args-in-the-global-scope) -# -# But I don't see a way to do ternary logic with ENVs in a Dockerfile. -# -# Here is a post from StackOverflow where someone asks a similar -# question: -# https://stackoverflow.com/questions/67596193/building-a-multi-architecture-docker-image-but-dockerfile-requires-different-pa -ENV INSTALL_DEPS \ - wget -RUN apt-get install --quiet --quiet --yes \ - --no-install-recommends --no-install-suggests \ - $DEPS $INSTALL_DEPS +RUN apt-get update --quiet --quiet \ + && apt-get install --quiet --quiet --yes --no-install-recommends --no-install-suggests \ + libpq-dev=15.10-0+deb12u1 ### -# Make sure pip, setuptools, and wheel are the latest versions +# Install the specified versions of pip, setuptools, and wheel into the system +# Python environment; install the specified version of pipenv into the system Python +# environment; set up a Python virtual environment (venv); and install the specified +# versions of pip, setuptools, and wheel into the venv. # -# Note that we use pip3 --no-cache-dir to avoid writing to a local +# Note that we use the --no-cache-dir flag to avoid writing to a local # cache. This results in a smaller final image, at the cost of # slightly longer install times. 
### -RUN pip3 install --no-cache-dir --upgrade \ - pip \ - setuptools \ - wheel - -### -# Perform remaining steps as the unprivileged user, from the -# unprivileged user's home directory -### -USER ${CISA_USER}:${CISA_GROUP} -WORKDIR ${CISA_HOME} - -### -# Manually set up the virtual environment -### -ENV PY_VENV=${CISA_HOME}/.venv -RUN python3 -m venv ${PY_VENV} -ENV PATH="${PY_VENV}/bin:$PATH" -# Install/upgrade core Python dependencies RUN python3 -m pip install --no-cache-dir --upgrade \ - pip==21.3.1 \ - setuptools==58.5.3 \ - wheel==0.37.0 - -# Download and install guacscanner -RUN python3 -m pip install --no-cache-dir \ - https://github.com/cisagov/guacscanner/archive/v${VERSION}.tar.gz - + pip==${PYTHON_PIP_VERSION} \ + setuptools==${PYTHON_SETUPTOOLS_VERSION} \ + wheel==${PYTHON_WHEEL_VERSION} \ + && python3 -m pip install --no-cache-dir --upgrade \ + pipenv==${PYTHON_PIPENV_VERSION} \ + # Manually create the virtual environment + && python3 -m venv ${VIRTUAL_ENV} \ + # Ensure the core Python packages are installed in the virtual environment + && ${VIRTUAL_ENV}/bin/python3 -m pip install --no-cache-dir --upgrade \ + pip==${PYTHON_PIP_VERSION} \ + setuptools==${PYTHON_SETUPTOOLS_VERSION} \ + wheel==${PYTHON_WHEEL_VERSION} + +### +# Check the Pipfile configuration and then install the Python dependencies into +# the virtual environment. +# +# Note that pipenv will install into a virtual environment if the VIRTUAL_ENV +# environment variable is set. +### +WORKDIR /tmp +COPY src/Pipfile src/Pipfile.lock ./ +RUN pipenv check --verbose \ + && pipenv install --clear --deploy --extra-pip-args "--no-cache-dir" --verbose -FROM python:${PY_VERSION}-slim-bullseye AS build-stage +# Official Docker images are in the form library/ while non-official +# images are in the form /. +FROM docker.io/library/python:3.10.16-slim-bookworm AS build-stage ### # For a list of pre-defined annotation keys and value types see: @@ -119,12 +82,7 @@ ARG CISA_GID=${CISA_UID} ARG CISA_USER="cisa" ENV CISA_GROUP=${CISA_USER} ENV CISA_HOME="/home/${CISA_USER}" - -### -# Upgrade the system -### -RUN apt-get update --quiet --quiet \ - && apt-get upgrade --quiet --quiet +ENV VIRTUAL_ENV="${CISA_HOME}/.venv" ### # Create unprivileged user @@ -135,27 +93,32 @@ RUN groupadd --system --gid ${CISA_GID} ${CISA_GROUP} \ ### # Install everything we need ### -ENV DEPS \ - libpq-dev=13.13-0+deb11u1 # Note that we clean up aptitude cruft after installing dependencies. # This must be done in one fell swoop to actually reduce the size of # the resulting Docker image: # https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#minimize-the-number-of-layers -RUN apt-get install --quiet --quiet --yes \ - --no-install-recommends --no-install-suggests \ - $DEPS \ +RUN apt-get update --quiet --quiet \ + && apt-get install --quiet --quiet --yes --no-install-recommends --no-install-suggests \ + libpq-dev=15.10-0+deb12u1 \ && apt-get clean \ && rm --recursive --force /var/lib/apt/lists/* -# Manually set up the virtual environment, copying the venv over from -# the compile stage -ENV PY_VENV=${CISA_HOME}/.venv -COPY --from=compile-stage ${CISA_HOME} ${CISA_HOME}/ -ENV PATH="${PY_VENV}/bin:$PATH" +### +# Copy in the Python virtual environment created in compile-stage, symlink the +# Python binary in the venv to the system-wide Python, and add the venv to the PATH. +# +# Note that we symlink the Python binary in the venv to the system-wide Python so that +# any calls to `python3` will use our virtual environment. 
We are using short flags +# because the ln binary in Alpine Linux does not support long flags. The -f instructs +# ln to remove the existing file and the -s instructs ln to create a symbolic link. +### +COPY --from=compile-stage --chown=${CISA_USER}:${CISA_GROUP} ${VIRTUAL_ENV} ${VIRTUAL_ENV} +RUN ln --force --symbolic "$(command -v python3)" "${VIRTUAL_ENV}"/bin/python3 +ENV PATH="${VIRTUAL_ENV}/bin:$PATH" ### # Prepare to run ### -USER ${CISA_USER}:${CISA_GROUP} WORKDIR ${CISA_HOME} +USER ${CISA_USER}:${CISA_GROUP} ENTRYPOINT ["guacscanner"] diff --git a/README.md b/README.md index 3bdb9c9..b739ed9 100644 --- a/README.md +++ b/README.md @@ -25,16 +25,16 @@ composition](https://docs.docker.com/compose/) alongside only the To run the `cisagov/guacscanner` image via Docker: ```console -docker run cisagov/guacscanner:1.1.18 +docker run cisagov/guacscanner:1.1.18+build.1 ``` ### Running with Docker Compose ### See [cisagov/guacamole-composition](https://github.com/cisagov/guacamole-composition)) -for an example of how to create a `docker-compose.yml` file to use +for an example of how to create a `compose.yml` file to use [Docker Compose](https://docs.docker.com/compose/). With a -`docker-compose.yml` file in hand, one need only start the container +`compose.yml` file in hand, one need only start the container and detach: ```console @@ -51,7 +51,7 @@ environment variables. See the Again, see [cisagov/guacamole-composition](https://github.com/cisagov/guacamole-composition)) -for an example of how to create a `docker-compose.yml` file that uses +for an example of how to create a `compose.yml` file that uses Docker secrets. ## Updating your container ## @@ -82,22 +82,51 @@ Docker secrets. 1. Pull the new image: ```console - docker pull cisagov/guacscanner:1.1.18 + docker pull cisagov/guacscanner:1.1.18+build.1 ``` 1. Recreate and run the container by following the [previous instructions](#running-with-docker). +## Updating Python dependencies ## + +This image uses [Pipenv] to manage Python dependencies using a [Pipfile](https://github.com/pypa/pipfile). +Both updating dependencies and changing the [Pipenv] configuration in `src/Pipfile` +will result in a modified `src/Pipfile.lock` file that should be committed to the +repository. + +> [!WARNING] +> The `src/Pipfile.lock` as generated will fail `pre-commit` checks due to JSON formatting. + +### Updating dependencies ### + +If you want to update existing dependencies you would run the following command +in the `src/` subdirectory: + +```console +pipenv lock +``` + +### Modifying dependencies ### + +If you want to add or remove dependencies you would update the `src/Pipfile` file +and then update dependencies as you would above. + +> [!NOTE] +> You should only specify packages that are direct requirements of +> your Docker configuration. Allow [Pipenv] to manage the dependencies +> of the specified packages. + ## Image tags ## The images of this container are tagged with [semantic versions](https://semver.org) of the underlying example project that they containerize. It is recommended that most users use a version -tag (e.g. `:1.1.18`). +tag (e.g. `:1.1.18+build.1`). | Image:tag | Description | |-----------|-------------| -|`cisagov/guacscanner:1.1.18`| An exact release version. | +|`cisagov/guacscanner:1.1.18+build.1`| An exact release version. | |`cisagov/guacscanner:1.1`| The most recent release matching the major and minor version numbers. | |`cisagov/guacscanner:1`| The most recent release matching the major version number. 
| |`cisagov/guacscanner:edge` | The most recent image built from a merge into the `develop` branch of this repository. | @@ -128,7 +157,7 @@ No ports are exposed by this container. --> @@ -173,8 +202,8 @@ Build the image locally using this git repository as the [build context](https:/ ```console docker build \ - --build-arg VERSION=1.1.18 \ - --tag cisagov/guacscanner:1.1.18 \ + --build-arg VERSION=1.1.18+build.1 \ + --tag cisagov/guacscanner:1.1.18+build.1 \ https://github.com/cisagov/guacscanner.git#develop ``` @@ -204,9 +233,9 @@ Docker: docker buildx build \ --file Dockerfile-x \ --platform linux/amd64 \ - --build-arg VERSION=1.1.18 \ + --build-arg VERSION=1.1.18+build.1 \ --output type=docker \ - --tag cisagov/guacscanner:1.1.18 . + --tag cisagov/guacscanner:1.1.18+build.1 . ``` ## Contributing ## @@ -226,3 +255,5 @@ dedication](https://creativecommons.org/publicdomain/zero/1.0/). All contributions to this project will be released under the CC0 dedication. By submitting a pull request, you are agreeing to comply with this waiver of copyright interest. + +[Pipenv]: https://pypi.org/project/pipenv/ diff --git a/buildx-dockerfile.sh b/buildx-dockerfile.sh deleted file mode 100755 index 46710e9..0000000 --- a/buildx-dockerfile.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env bash - -# Create a Dockerfile suitable for a multi-platform build using buildx -# See: https://docs.docker.com/buildx/working-with-buildx/ - -set -o nounset -set -o errexit -set -o pipefail - -DOCKERFILE=Dockerfile -DOCKERFILEX=Dockerfile-x - -# We don't want this expression to expand. -# shellcheck disable=SC2016 -sed 's/^FROM /FROM --platform=$TARGETPLATFORM /g' < $DOCKERFILE > $DOCKERFILEX diff --git a/bump-version b/bump-version new file mode 100755 index 0000000..324fea9 --- /dev/null +++ b/bump-version @@ -0,0 +1,172 @@ +#!/usr/bin/env bash + +# bump-version [--push] [--label LABEL] (major | minor | patch | prerelease | build | finalize | show) +# bump-version --list-files + +set -o nounset +set -o errexit +set -o pipefail + +# Stores the canonical version for the project. +VERSION_FILE=src/version.txt +# Files that should be updated with the new version. +VERSION_FILES=("$VERSION_FILE" README.md) + +USAGE=$( + cat << END_OF_LINE +Update the version of the project. + +Usage: + ${0##*/} [--push] [--label LABEL] (major | minor | patch | prerelease | build | finalize | show) + ${0##*/} --list-files + ${0##*/} (-h | --help) + +Options: + -h | --help Show this message. + --push Perform a \`git push\` after updating the version. + --label LABEL Specify the label to use when updating the build or prerelease version. + --list-files List the files that will be updated when the version is bumped. +END_OF_LINE +) + +old_version=$(< "$VERSION_FILE") +# Comment out periods so they are interpreted as periods and don't +# just match any character +old_version_regex=${old_version//\./\\\.} +new_version="$old_version" + +bump_part="" +label="" +commit_prefix="Bump" +with_push=false +commands_with_label=("build" "prerelease") +commands_with_prerelease=("major" "minor" "patch") +with_prerelease=false + +####################################### +# Display an error message, the help information, and exit with a non-zero status. +# Arguments: +# Error message. +####################################### +function invalid_option() { + echo "$1" + echo "$USAGE" + exit 1 +} + +####################################### +# Bump the version using the provided command. +# Arguments: +# The version to bump. +# The command to bump the version. 
+# Returns: +# The new version. +####################################### +function bump_version() { + local temp_version + temp_version=$(python -c "import semver; print(semver.parse_version_info('$1').${2})") + echo "$temp_version" +} + +if [ $# -eq 0 ]; then + echo "$USAGE" + exit 1 +else + while [ $# -gt 0 ]; do + case $1 in + --push) + if [ "$with_push" = true ]; then + invalid_option "Push has already been set." + fi + + with_push=true + shift + ;; + --label) + if [ -n "$label" ]; then + invalid_option "Label has already been set." + fi + + label="$2" + shift 2 + ;; + build | finalize | major | minor | patch) + if [ -n "$bump_part" ]; then + invalid_option "Only one version part should be bumped at a time." + fi + + bump_part="$1" + shift + ;; + prerelease) + with_prerelease=true + shift + ;; + show) + echo "$old_version" + exit 0 + ;; + -h | --help) + echo "$USAGE" + exit 0 + ;; + --list-files) + printf '%s\n' "${VERSION_FILES[@]}" + exit 0 + ;; + *) + invalid_option "Invalid option: $1" + ;; + esac + done +fi + +if [ -n "$label" ] && [ "$with_prerelease" = false ] && [[ ! " ${commands_with_label[*]} " =~ [[:space:]]${bump_part}[[:space:]] ]]; then + invalid_option "Setting the label is only allowed for the following commands: ${commands_with_label[*]}" +fi + +if [ "$with_prerelease" = true ] && [ -n "$bump_part" ] && [[ ! " ${commands_with_prerelease[*]} " =~ [[:space:]]${bump_part}[[:space:]] ]]; then + invalid_option "Changing the prerelease is only allowed in conjunction with the following commands: ${commands_with_prerelease[*]}" +fi + +label_option="" +if [ -n "$label" ]; then + label_option="token='$label'" +fi + +if [ -n "$bump_part" ]; then + if [ "$bump_part" = "finalize" ]; then + commit_prefix="Finalize" + bump_command="finalize_version()" + elif [ "$bump_part" = "build" ]; then + bump_command="bump_${bump_part}($label_option)" + else + bump_command="bump_${bump_part}()" + fi + new_version=$(bump_version "$old_version" "$bump_command") + echo Changing version from "$old_version" to "$new_version" +fi + +if [ "$with_prerelease" = true ]; then + bump_command="bump_prerelease($label_option)" + temp_version=$(bump_version "$new_version" "$bump_command") + echo Changing version from "$new_version" to "$temp_version" + new_version="$temp_version" +fi + +tmp_file=/tmp/version.$$ +for version_file in "${VERSION_FILES[@]}"; do + if [ ! -f "$version_file" ]; then + echo Missing expected file: "$version_file" + exit 1 + fi + sed "s/$old_version_regex/$new_version/" "$version_file" > $tmp_file + mv $tmp_file "$version_file" +done + +git add "${VERSION_FILES[@]}" +git commit --message "$commit_prefix version from $old_version to $new_version" + +if [ "$with_push" = true ]; then + git push +fi diff --git a/bump_version.sh b/bump_version.sh deleted file mode 100755 index 0071670..0000000 --- a/bump_version.sh +++ /dev/null @@ -1,81 +0,0 @@ -#!/usr/bin/env bash - -# Usage: -# bump_version.sh (show|major|minor|patch|finalize) -# bump_version.sh (build|prerelease) [token] -# Notes: -# - If you specify a token it will only be used if the current version is -# tokenless or if the provided token matches the token used in the current -# version. 
- -set -o nounset -set -o errexit -set -o pipefail - -VERSION_FILE=src/version.txt -README_FILE=README.md - -function usage { - cat << HELP -Usage: - ${0##*/} (show|major|minor|patch|finalize) - ${0##*/} (build|prerelease) [token] - -Notes: - - If you specify a token it will only be used if the current version is - tokenless or if the provided token matches the token used in the current - version. -HELP - exit 1 -} - -function update_version { - # Comment out periods so they are interpreted as periods and don't - # just match any character - old_version_regex=${1//\./\\\.} - - echo Changing version from "$1" to "$2" - tmp_file=/tmp/version.$$ - sed "s/$old_version_regex/$2/" $VERSION_FILE > $tmp_file - mv $tmp_file $VERSION_FILE - sed "s/$old_version_regex/$2/" $README_FILE > $tmp_file - mv $tmp_file $README_FILE - git add $VERSION_FILE $README_FILE - git commit --message "$3" -} - -if [ $# -lt 1 ] || [ $# -gt 2 ]; then - usage -else - old_version=$(sed -n "s/^__version__ = \"\(.*\)\"$/\1/p" $VERSION_FILE) - case $1 in - major | minor | patch) - if [ $# -ne 1 ]; then - usage - fi - new_version=$(python -c "import semver; print(semver.bump_$1('$old_version'))") - update_version "$old_version" "$new_version" "Bump version from $old_version to $new_version" - ;; - build | prerelease) - if [ $# -eq 2 ]; then - new_version=$(python -c "import semver; print(semver.bump_$1('$old_version', token='$2'))") - else - new_version=$(python -c "import semver; print(semver.bump_$1('$old_version'))") - fi - update_version "$old_version" "$new_version" "Bump version from $old_version to $new_version" - ;; - finalize) - if [ $# -ne 1 ]; then - usage - fi - new_version=$(python -c "import semver; print(semver.finalize_version('$old_version'))") - update_version "$old_version" "$new_version" "Finalize version from $old_version to $new_version" - ;; - show) - echo "$old_version" - ;; - *) - usage - ;; - esac -fi diff --git a/docker-compose.yml b/compose.yml similarity index 98% rename from docker-compose.yml rename to compose.yml index ab32e30..0d8e112 100644 --- a/docker-compose.yml +++ b/compose.yml @@ -1,8 +1,8 @@ --- -version: "3.7" - # This Docker composition file is used to build and test the container +name: guacscanner + secrets: postgres_password: file: ./src/secrets/postgres-password @@ -52,7 +52,7 @@ services: guacscanner-version: # Run the container to collect version information + command: --version image: cisagov/guacscanner init: true restart: "no" - command: --version diff --git a/push_readme.sh b/push_readme.sh deleted file mode 100755 index 29b12aa..0000000 --- a/push_readme.sh +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env bash - -# Push the README.md file to the Docker Hub repository - -# Requires the following environment variables to be set: -# DOCKER_PASSWORD, DOCKER_USERNAME, IMAGE_NAME - -set -o nounset -set -o errexit -set -o pipefail - -echo "Logging in and requesting JWT..." -token=$(curl --silent --request POST \ - --header "Content-Type: application/json" \ - --data \ - '{"username": "'"$DOCKER_USERNAME"'", "password": "'"$DOCKER_PASSWORD"'"}' \ - https://hub.docker.com/v2/users/login/ | jq --raw-output .token) - -echo "Pushing README file..." 
-code=$(jq --null-input --arg msg "$(< README.md)" \ - '{"registry":"registry-1.docker.io","full_description": $msg }' \ - | curl --silent --output /dev/null --location --write-out "%{http_code}" \ - https://hub.docker.com/v2/repositories/"${IMAGE_NAME}"/ \ - --data @- --request PATCH \ - --header "Content-Type: application/json" \ - --header "Authorization: JWT ${token}") - -if [[ "${code}" = "200" ]]; then - printf "Successfully pushed README to Docker Hub" -else - printf "Unable to push README to Docker Hub, response code: %s\n" "${code}" - exit 1 -fi diff --git a/requirements-dev.txt b/requirements-dev.txt index cb51627..8d26d98 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,3 +1,3 @@ --requirement requirements-test.txt ipython -semver +pipenv diff --git a/requirements-test.txt b/requirements-test.txt index 8b41b2f..16c2f58 100644 --- a/requirements-test.txt +++ b/requirements-test.txt @@ -2,3 +2,5 @@ pre-commit pytest python-on-whales +# The bump-version script requires at least version 3 of semver. +semver>=3 diff --git a/setup-env b/setup-env index 77926bf..059ccad 100755 --- a/setup-env +++ b/setup-env @@ -9,60 +9,122 @@ USAGE=$( Configure a development environment for this repository. It does the following: + - Allows the user to specify the Python version to use for the virtual environment. + - Allows the user to specify a name for the virtual environment. - Verifies pyenv and pyenv-virtualenv are installed. - - Creates a Python virtual environment. + - Creates the Python virtual environment. - Configures the activation of the virtual enviroment for the repo directory. - Installs the requirements needed for development. - Installs git pre-commit hooks. - - Configures git upstream remote "lineage" repositories. + - Configures git remotes for upstream "lineage" repositories. Usage: - setup-env [options] [virt_env_name] + setup-env [--venv-name venv_name] [--python-version python_version] setup-env (-h | --help) Options: - -f --force Delete virtual enviroment if it already exists. - -h --help Show this message. - -i --install-hooks Install hook environments for all environments in the - pre-commit config file. + -f | --force Delete virtual enviroment if it already exists. + -h | --help Show this message. + -i | --install-hooks Install hook environments for all environments in the + pre-commit config file. + -l | --list-versions List available Python versions and select one interactively. + -v | --venv-name Specify the name of the virtual environment. + -p | --python-version Specify the Python version for the virtual environment. END_OF_LINE ) +# Display pyenv's installed Python versions +python_versions() { + pyenv versions --bare --skip-aliases --skip-envs +} + +check_python_version() { + local version=$1 + + # This is a valid regex for semantically correct Python version strings. + # For more information see here: + # https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string + # Break down the regex into readable parts major.minor.patch + local major="0|[1-9]\d*" + local minor="0|[1-9]\d*" + local patch="0|[1-9]\d*" + + # Splitting the prerelease part for readability + # Start of the prerelease + local prerelease="(?:-" + # Numeric or alphanumeric identifiers + local prerelease+="(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)" + # Additional dot-separated identifiers + local prerelease+="(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*" + # End of the prerelease, making it optional + local prerelease+=")?" 
+  # Optional build metadata
+  local build="(?:\+([0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?"
+
+  # Final regex composed of parts
+  local regex="^($major)\.($minor)\.($patch)$prerelease$build$"
+
+  # This checks if the Python version does not match the regex pattern specified in $regex,
+  # using Perl for regex matching. If the pattern is not found, then prompt the user with
+  # the invalid version message.
+  if ! echo "$version" | perl -ne "exit(!/$regex/)"; then
+    echo "Invalid version of Python: Python follows semantic versioning," \
+      "so any version string that is not a valid semantic version is an" \
+      "invalid version of Python."
+    exit 1
+  # Else if the Python version isn't installed then notify the user.
+  # grep -E is used for searching through text lines that match the specific version.
+  elif ! python_versions | grep -E "^${version}$" > /dev/null; then
+    echo "Error: Python version $version is not installed."
+    echo "Installed Python versions are:"
+    python_versions
+    exit 1
+  else
+    echo "Using Python version $version"
+  fi
+}
+
 # Flag to force deletion and creation of virtual environment
 FORCE=0
 
-# Positional parameters
-PARAMS=""
+# Initialize the other flags
+INSTALL_HOOKS=0
+LIST_VERSIONS=0
+PYTHON_VERSION=""
+VENV_NAME=""
 
-# Parse command line arguments
-while (("$#")); do
-  case "$1" in
-    -f | --force)
-      FORCE=1
-      shift
-      ;;
-    -h | --help)
-      echo "${USAGE}"
-      exit 0
-      ;;
-    -i | --install-hooks)
-      INSTALL_HOOKS=1
-      shift
-      ;;
-    -*) # unsupported flags
-      echo "Error: Unsupported flag $1" >&2
-      exit 1
-      ;;
-    *) # preserve positional arguments
-      PARAMS="$PARAMS $1"
-      shift
-      ;;
-  esac
-done
+# Define long options
+LONGOPTS="force,help,install-hooks,list-versions,python-version:,venv-name:"
+
+# Define short options for getopt
+SHORTOPTS="fhilp:v:"
+
+# Check for GNU getopt by matching a specific pattern ("getopt from util-linux")
+# in its version output. This approach presumes the output format remains stable.
+# Be aware that format changes could invalidate this check.
+if [[ $(getopt --version 2> /dev/null) != *"getopt from util-linux"* ]]; then
+  cat << 'END_OF_LINE'
 
-# set positional arguments in their proper place
-eval set -- "$PARAMS"
+  Please note, this script requires GNU getopt due to its enhanced
+  functionality and compatibility with certain script features that
+  are not supported by the POSIX getopt found in some systems, particularly
+  those with a non-GNU version of getopt. This distinction is crucial
+  as a system might have a non-GNU version of getopt installed by default,
+  which could lead to unexpected behavior.
+
+  On macOS, we recommend installing brew (https://brew.sh/). Then installation
+  is as simple as `brew install gnu-getopt` and adding this to your
+  profile:
+
+  export PATH="$(brew --prefix)/opt/gnu-getopt/bin:$PATH"
+
+  GNU getopt must be explicitly added to the PATH since it
+  is keg-only (https://docs.brew.sh/FAQ#what-does-keg-only-mean).
+
+END_OF_LINE
+  exit 1
+fi
 
 # Check to see if pyenv is installed
 if [ -z "$(command -v pyenv)" ] || { [ -z "$(command -v pyenv-virtualenv)" ] && [ ! -f "$(pyenv root)/plugins/pyenv-virtualenv/bin/pyenv-virtualenv" ]; }; then
@@ -70,7 +132,7 @@ if [ -z "$(command -v pyenv)" ] || { [ -z "$(command -v pyenv-virtualenv)" ] &&
 
   if [[ "$OSTYPE" == "darwin"* ]]; then
     cat << 'END_OF_LINE'
-  On the Mac, we recommend installing brew, https://brew.sh/.  Then installation
+  On macOS, we recommend installing brew, https://brew.sh/.
Then installation is as simple as `brew install pyenv pyenv-virtualenv` and adding this to your profile: @@ -81,7 +143,7 @@ END_OF_LINE fi cat << 'END_OF_LINE' - For Linux, Windows Subsystem for Linux (WSL), or on the Mac (if you don't want + For Linux, Windows Subsystem for Linux (WSL), or macOS (if you don't want to use "brew") you can use https://github.com/pyenv/pyenv-installer to install the necessary tools. Before running this ensure that you have installed the prerequisites for your platform according to the pyenv wiki page, @@ -100,16 +162,72 @@ END_OF_LINE exit 1 fi -set +o nounset +# Use GNU getopt to parse options +if ! PARSED=$(getopt --options $SHORTOPTS --longoptions $LONGOPTS --name "$0" -- "$@"); then + echo "Error parsing options" + exit 1 +fi +eval set -- "$PARSED" + +while true; do + case "$1" in + -f | --force) + FORCE=1 + shift + ;; + -h | --help) + echo "$USAGE" + exit 0 + ;; + -i | --install-hooks) + INSTALL_HOOKS=1 + shift + ;; + -l | --list-versions) + LIST_VERSIONS=1 + shift + ;; + -p | --python-version) + PYTHON_VERSION="$2" + shift 2 + # Check the Python version being passed in. + check_python_version "$PYTHON_VERSION" + ;; + -v | --venv-name) + VENV_NAME="$2" + shift 2 + ;; + --) + shift + break + ;; + *) + # Unreachable due to GNU getopt handling all options + echo "Programming error" + exit 64 + ;; + esac +done + # Determine the virtual environment name -if [ "$1" ]; then +if [ -n "$VENV_NAME" ]; then # Use the user-provided environment name - env_name=$1 + env_name="$VENV_NAME" else # Set the environment name to the last part of the working directory. env_name=${PWD##*/} fi -set -o nounset + +# List Python versions and select one interactively. +if [ $LIST_VERSIONS -ne 0 ]; then + echo Available Python versions: + python_versions + # Read the user's desired Python version. + # -r: treat backslashes as literal, -p: display prompt before input. + read -r -p "Enter the desired Python version: " PYTHON_VERSION + # Check the Python version being passed in. + check_python_version "$PYTHON_VERSION" +fi # Remove any lingering local configuration. if [ $FORCE -ne 0 ]; then @@ -118,7 +236,7 @@ if [ $FORCE -ne 0 ]; then elif [[ -f .python-version ]]; then cat << 'END_OF_LINE' An existing .python-version file was found. Either remove this file yourself - or re-run with --force option to have it deleted along with the associated + or re-run with the --force option to have it deleted along with the associated virtual environment. rm .python-version @@ -128,10 +246,18 @@ END_OF_LINE fi # Create a new virtual environment for this project -if ! pyenv virtualenv "${env_name}"; then +# +# If $PYTHON_VERSION is undefined then the current pyenv Python version will be used. +# +# We can't quote ${PYTHON_VERSION:=} below since if the variable is +# undefined then we want nothing to appear; this is the reason for the +# "shellcheck disable" line below. +# +# shellcheck disable=SC2086 +if ! pyenv virtualenv ${PYTHON_VERSION:=} "${env_name}"; then cat << END_OF_LINE An existing virtual environment named $env_name was found. Either delete this - environment yourself or re-run with --force option to have it deleted. + environment yourself or re-run with the --force option to have it deleted. 
pyenv virtualenv-delete ${env_name} diff --git a/src/Pipfile b/src/Pipfile new file mode 100644 index 0000000..b5fdb46 --- /dev/null +++ b/src/Pipfile @@ -0,0 +1,13 @@ +[[source]] +url = "https://pypi.org/simple" +verify_ssl = true +name = "pypi" + +# List any Python dependencies for the image here +[packages] +# This should match the version of the image +guacscanner = {file = "https://github.com/cisagov/guacscanner/archive/v1.1.18.tar.gz"} + +# This version should match the version of Python in the image +[requires] +python_full_version = "3.10.16" diff --git a/src/Pipfile.lock b/src/Pipfile.lock new file mode 100644 index 0000000..46ad49f --- /dev/null +++ b/src/Pipfile.lock @@ -0,0 +1,247 @@ +{ + "_meta": { + "hash": { + "sha256": "dba417d6f5774c408528ee621db5ece4c5913b1c56eaebb6ff6e9699e12f83d9" + }, + "pipfile-spec": 6, + "requires": { + "python_full_version": "3.10.16" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": true + } + ] + }, + "default": { + "boto3": { + "hashes": [ + "sha256:9b6903fe9cc92d2f6111db28675263f1ab45adbcf1483025c82a304ce7790b71", + "sha256:f2ce641957c1782e382548ced4a447189e45851bbe58c1f6752ff2b661527de7" + ], + "markers": "python_version >= '3.6'", + "version": "==1.21.8" + }, + "botocore": { + "hashes": [ + "sha256:663d8f02b98641846eb959c54c840cc33264d5f2dee5b8fc09ee8adbef0f8dcf", + "sha256:89a203bba3c8f2299287e48a9e112e2dbe478cf67eaac26716f0e7f176446146" + ], + "markers": "python_version >= '3.6'", + "version": "==1.24.46" + }, + "certifi": { + "hashes": [ + "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651", + "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe" + ], + "markers": "python_version >= '3.6'", + "version": "==2025.1.31" + }, + "charset-normalizer": { + "hashes": [ + "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537", + "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa", + "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a", + "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294", + "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b", + "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd", + "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601", + "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd", + "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4", + "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d", + "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2", + "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313", + "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd", + "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa", + "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8", + "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1", + "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2", + "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496", + "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d", + "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b", + "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e", + "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a", + 
"sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4", + "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca", + "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78", + "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408", + "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5", + "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3", + "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f", + "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a", + "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765", + "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6", + "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146", + "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6", + "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9", + "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd", + "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c", + "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f", + "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545", + "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176", + "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770", + "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824", + "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f", + "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf", + "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487", + "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d", + "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd", + "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b", + "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534", + "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f", + "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b", + "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9", + "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd", + "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125", + "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9", + "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de", + "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11", + "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d", + "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35", + "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f", + "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda", + "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7", + "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a", + "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971", + "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8", + "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41", + "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d", + "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f", + 
"sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757", + "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a", + "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886", + "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77", + "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76", + "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247", + "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85", + "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb", + "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7", + "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e", + "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6", + "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037", + "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1", + "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e", + "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807", + "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407", + "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c", + "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12", + "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3", + "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089", + "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd", + "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e", + "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00", + "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616" + ], + "markers": "python_version >= '3.7'", + "version": "==3.4.1" + }, + "contextlib2": { + "hashes": [ + "sha256:3fbdb64466afd23abaf6c977627b75b6139a5a3e8ce38405c5b413aed7a0471f", + "sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869" + ], + "markers": "python_version >= '3.6'", + "version": "==21.6.0" + }, + "docopt": { + "hashes": [ + "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491" + ], + "version": "==0.6.2" + }, + "ec2-metadata": { + "hashes": [ + "sha256:422dcf54543b500eec78ffa0c5524c6a1133440cc905b344e9fcbe176266fbb6", + "sha256:6e41d506cfcd43aa07d5ab2f9a77cb585141ffafa96f588027060722115ad5b0" + ], + "markers": "python_version >= '3.7'", + "version": "==2.7.0" + }, + "guacscanner": { + "file": "https://github.com/cisagov/guacscanner/archive/v1.1.18.tar.gz" + }, + "idna": { + "hashes": [ + "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", + "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3" + ], + "markers": "python_version >= '3.6'", + "version": "==3.10" + }, + "jmespath": { + "hashes": [ + "sha256:b85d0567b8666149a93172712e68920734333c0ce7e89b78b3e987f71e5ed4f9", + "sha256:cdf6525904cc597730141d61b36f2e4b8ecc257c420fa2f4549bac2c2d0cb72f" + ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2'", + "version": "==0.10.0" + }, + "psycopg": { + "hashes": [ + "sha256:801ea3fa3ea7f562506dce6f0121d652b2525073294c7c08ce1d4250f6614405", + "sha256:9f72c6bdccfdab405c654ea2e9ed2293a3e9f993f66a37621f796732ce8e1855" + ], + "markers": "python_version >= '3.6'", + "version": "==3.0.9" + }, + "python-dateutil": { + "hashes": [ + 
"sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", + "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", + "version": "==2.9.0.post0" + }, + "requests": { + "hashes": [ + "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", + "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6" + ], + "markers": "python_version >= '3.8'", + "version": "==2.32.3" + }, + "s3transfer": { + "hashes": [ + "sha256:7a6f4c4d1fdb9a2b640244008e142cbc2cd3ae34b386584ef044dd0f27101971", + "sha256:95c58c194ce657a5f4fb0b9e60a84968c808888aed628cd98ab8771fe1db98ed" + ], + "markers": "python_version >= '3.6'", + "version": "==0.5.2" + }, + "schema": { + "hashes": [ + "sha256:f06717112c61895cabc4707752b88716e8420a8819d71404501e114f91043197", + "sha256:f3ffdeeada09ec34bf40d7d79996d9f7175db93b7a5065de0faa7f41083c1e6c" + ], + "version": "==0.7.5" + }, + "setuptools": { + "hashes": [ + "sha256:c5afc8f407c626b8313a86e10311dd3f661c6cd9c09d4bf8c15c0e11f9f2b0e6", + "sha256:e3982f444617239225d675215d51f6ba05f845d4eec313da4418fdbb56fb27e3" + ], + "markers": "python_version >= '3.9'", + "version": "==75.8.0" + }, + "six": { + "hashes": [ + "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", + "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2'", + "version": "==1.17.0" + }, + "urllib3": { + "hashes": [ + "sha256:0ed14ccfbf1c30a9072c7ca157e4319b70d65f623e91e7b32fadb2853431016e", + "sha256:40c2dc0c681e47eb8f90e7e27bf6ff7df2e677421fd46756da1161c39ca70d32" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", + "version": "==1.26.20" + } + }, + "develop": {} +} diff --git a/src/version.txt b/src/version.txt index bb6d454..d9cc41e 100644 --- a/src/version.txt +++ b/src/version.txt @@ -1 +1 @@ -__version__ = "1.1.18" +1.1.18+build.1 diff --git a/tag.sh b/tag.sh index e1f7447..0a0e607 100755 --- a/tag.sh +++ b/tag.sh @@ -4,6 +4,6 @@ set -o nounset set -o errexit set -o pipefail -version=$(./bump_version.sh show) +version=$(./bump-version show) git tag "v$version" && git push --tags diff --git a/tests/conftest.py b/tests/conftest.py index b91f1d7..4a17fb4 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -2,6 +2,7 @@ https://docs.pytest.org/en/latest/writing_plugins.html#conftest-py-plugins """ + # Third-Party Libraries import pytest from python_on_whales import docker @@ -9,6 +10,8 @@ MAIN_SERVICE_NAME = "guacscanner" VERSION_SERVICE_NAME = f"{MAIN_SERVICE_NAME}-version" +VERSION_FILE = "src/version.txt" + @pytest.fixture(scope="session") def dockerc(): @@ -35,6 +38,14 @@ def version_container(dockerc): return dockerc.compose.ps(services=[VERSION_SERVICE_NAME], all=True)[0] +@pytest.fixture(scope="session") +def project_version(): + """Return the version of the project.""" + with open(VERSION_FILE) as f: + project_version = f.read().strip() + return project_version + + def pytest_addoption(parser): """Add new commandline options to pytest.""" parser.addoption( diff --git a/tests/container_test.py b/tests/container_test.py index 3a188b8..efa9306 100644 --- a/tests/container_test.py +++ b/tests/container_test.py @@ -1,4 +1,3 @@ -#!/usr/bin/env pytest -vs """Tests for example container.""" # Standard Python Libraries @@ -6,6 +5,7 @@ # Third-Party Libraries import pytest +from semver import 
parse_version_info RELEASE_TAG = os.getenv("RELEASE_TAG") VERSION_FILE = "src/version.txt" @@ -58,37 +58,28 @@ def test_wait_for_exits(dockerc, main_container, version_container): @pytest.mark.skipif( RELEASE_TAG in [None, ""], reason="this is not a release (RELEASE_TAG not set)" ) -def test_release_version(): +def test_release_version(project_version): """Verify that release tag version agrees with the module version.""" - pkg_vars = {} - with open(VERSION_FILE) as f: - exec(f.read(), pkg_vars) # nosec - project_version = pkg_vars["__version__"] assert ( RELEASE_TAG == f"v{project_version}" ), "RELEASE_TAG does not match the project version" -def test_log_version(dockerc, version_container): +def test_log_version(dockerc, project_version, version_container): """Verify the container outputs the correct version to the logs.""" # make sure container exited if running test isolated dockerc.wait(version_container.id) - log_output = version_container.logs().strip() - pkg_vars = {} - with open(VERSION_FILE) as f: - exec(f.read(), pkg_vars) # nosec - project_version = pkg_vars["__version__"] - assert ( - log_output == project_version + log_version = parse_version_info(version_container.logs().strip()) + assert log_version == parse_version_info( + project_version ), f"Container version output to log does not match project version file {VERSION_FILE}" -def test_container_version_label_matches(version_container): +@pytest.mark.skipif( + RELEASE_TAG in [None, ""], reason="this is not a release (RELEASE_TAG not set)" +) +def test_container_version_label_matches(project_version, version_container): """Verify the container version label is the correct version.""" - pkg_vars = {} - with open(VERSION_FILE) as f: - exec(f.read(), pkg_vars) # nosec - project_version = pkg_vars["__version__"] assert ( version_container.config.labels["org.opencontainers.image.version"] == project_version
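
The changes above move all version handling onto the semver package: src/version.txt now stores a bare SemVer string, the new bump-version script composes calls such as bump_patch(), bump_build(token=...), and finalize_version() through `python -c "import semver; ..."`, and the tests compare parsed versions instead of exec()ing a __version__ assignment. The sketch below is illustrative only and is not part of this patch; it assumes semver>=3 as pinned in requirements-test.txt, and the version strings shown are examples.

# Illustrative sketch (not part of the patch): how the semver calls used by
# bump-version and tests/container_test.py behave, assuming semver>=3.
# Third-Party Libraries
from semver import parse_version_info

# src/version.txt now holds a bare SemVer string such as "1.1.18+build.1",
# so it can be parsed directly instead of exec()ing a __version__ assignment.
current = parse_version_info("1.1.18+build.1")

# `bump-version build` evaluates something like .bump_build(token="build"),
# which advances the build metadata: 1.1.18+build.1 -> 1.1.18+build.2.
print(current.bump_build(token="build"))

# `bump-version patch` and `bump-version finalize` map to these methods.
print(current.bump_patch())        # 1.1.19
print(current.finalize_version())  # 1.1.18

# test_log_version now compares parsed versions rather than raw strings;
# SemVer ignores build metadata when determining precedence, so a container
# that logs "1.1.18" still matches the "1.1.18+build.1" in src/version.txt.
assert parse_version_info("1.1.18") == current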