name: Run Tests and Build
on: [push, pull_request]
defaults:
run:
shell: bash -l {0}
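# bash -l runs each step's script in a login shell, so environment activation
# added to the shell profile by the environment setup (e.g. conda/mamba init)
# is picked up; {0} is replaced with the path to the step's script file.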
concurrency:
# make sure only one run of this workflow for a given PR or a given branch
# can happen at a time. Previously queued or in-progress runs will be cancelled.
# github.workflow is the workflow name
# github.ref is the ref that triggered the workflow run
# on push, this is refs/heads/<branch name>
# on pull request, this is refs/pull/<pull request number>/merge
group: ${{ github.workflow }}-${{ github.ref }}
cancel-in-progress: true
env:
# python-build: dependency of make install
# nomkl: make sure numpy is built without MKL
# setuptools_scm: needed for versioning to work
CONDA_DEFAULT_DEPENDENCIES: python-build nomkl setuptools_scm 'libsqlite<3.49.1' 'sqlite<3.49.1' # https://github.com/natcap/invest/issues/1797
LATEST_SUPPORTED_PYTHON_VERSION: "3.13"
jobs:
check-syntax-errors:
name: Check for syntax errors
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up python
uses: actions/setup-python@v5
with:
python-version: ${{ env.LATEST_SUPPORTED_PYTHON_VERSION }}
# this is fast enough that it's not worth caching
- name: Set up environment
run: pip install flake8
- name: Lint with flake8
run: |
# stop the build if there are Python syntax errors or undefined names
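# E9: syntax errors, F63: invalid comparisons/assertions, F7: statements in
# the wrong place (e.g. 'break' outside a loop), F82: undefined names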
python -m flake8 src --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
python -m flake8 src --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
check-rst-syntax:
name: Check RST syntax
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
name: Set up python
with:
python-version: ${{ env.LATEST_SUPPORTED_PYTHON_VERSION }}
- name: Set up environment
run: pip install doc8
- name: Lint with doc8
run: |
# Skip line-too-long errors (D001)
python -m doc8 --ignore D001 HISTORY.rst README_PYTHON.rst
# run-tests-manylinux:
# name: Run model tests (manylinux)
# runs-on: ubuntu-latest
# needs: check-syntax-errors
# strategy:
# fail-fast: false
# matrix:
# python-version: [3.9, "3.10", "3.11", "3.12", "3.13"]
# steps:
# - uses: actions/checkout@v4
# with:
# fetch-depth: 0 # Fetch complete history for accurate versioning
# # NOTE: It takes twice as long to save the sample data cache
# # as it does to do a fresh clone (almost 5 minutes vs. 2.5 minutes)
# # Test data is way, way faster by contrast (on the order of a few
# # seconds to archive).
# - name: Restore git-LFS test data cache
# uses: actions/cache@v3
# with:
# path: data/invest-test-data
# key: git-lfs-testdata-${{ hashfiles('Makefile') }}
# - name: Set up python environment (Ubuntu)
# uses: ./.github/actions/setup_env
# with:
# requirements-files: requirements.txt requirements-dev.txt constraints_tests.txt
# requirements: | # https://github.com/natcap/invest/issues/1797
# ${{ env.CONDA_DEFAULT_DEPENDENCIES }}
# libxcrypt
# python=${{ matrix.python-version }}
# sqlite<3.49.1
# - name: Download previous conda environment.yml
# continue-on-error: true
# # Using 'dawidd6' since 'download-artifact' GH action doesn't
# # support downloading artifacts from prior workflow runs
# uses: dawidd6/action-download-artifact@v6
# with:
# workflow: build-and-test.yml
# # Get frozen conda env artifact from last successful workflow
# workflow_conclusion: success
# name: Conda Env for ${{ matrix.os }} ${{ matrix.python-version }}
# path: ./conda-env-artifact
# - name: Build docker image
# run: |
# echo ${{ github.workspace }}
# docker build --progress plain -t manylinux -f .github/workflows/manylinux/Dockerfile .
# mkdir ${{ github.workspace }}/dist
# chmod +777 ${{ github.workspace }}/dist
# - uses: addnab/docker-run-action@v3
# with:
# image: manylinux
# options: -v ${{ github.workspace }}/dist:/home/runner/invest/dist:Z
# run: |
# echo "hello world"
# ls
# ls -ld dist
# # install micromamba
# "${SHELL}" <(curl -L micro.mamba.pm/install.sh)
# source ~/.bashrc
# micromamba --version
# echo ${{ env.CONDA_DEFAULT_DEPENDENCIES }} python=${{ matrix.python-version }} | xargs -n 1 > extra_requirements.txt
# /opt/python/cp313-cp313/bin/python ./scripts/convert-requirements-to-conda-yml.py \
# extra_requirements.txt requirements.txt requirements-dev.txt constraints_tests.txt \
# > environment.yml
# cat environment.yml
# micromamba env create -y -n env -f environment.yml
# micromamba activate env
# # Compare conda environments
# micromamba list > conda-env.txt
# # make sure that the exit code is always 0
# # otherwise, an error appears in the action annotations
# diff ./conda-env.txt ./conda-env-artifact/conda-env.txt || true
# ls -ld .
# ls -ld /home/runner
# ls -ld /home/runner/invest
# ls -ld /home/runner/invest/src/natcap/invest/delineateit
# # uninstall InVEST if it was already in the restored cache
# python -m build --wheel
# ls -la dist
# # Audit and repair wheel
# cd ~/invest
# WHEEL=$(find dist -name "natcap[._-]invest*.whl")
# auditwheel show $WHEEL
# auditwheel repair $WHEEL --plat manylinux_2_34_x86_64 -w dist
# rm $WHEEL # remove the original wheel
# - name: Install and run model tests
# run: |
# pip install $(find dist -name "natcap[._-]invest*.whl")
# make test
# - name: Upload wheel artifact
# uses: actions/upload-artifact@v4
# with:
# name: Wheel for ${{ matrix.os }} ${{ matrix.python-version }}
# path: ~/invest/dist
# - name: Upload conda env artifact
# uses: actions/upload-artifact@v4
# continue-on-error: true
# with:
# name: Conda Env for ${{ matrix.os }} ${{ matrix.python-version }}
# path: ~/invest/conda-env.txt
# - name: Authenticate GCP
# if: github.event_name != 'pull_request'
# uses: google-github-actions/auth@v2
# with:
# credentials_json: ${{ secrets.GOOGLE_SERVICE_ACC_KEY }}
# - name: Set up GCP
# if: github.event_name != 'pull_request'
# uses: google-github-actions/setup-gcloud@v2
# - name: Deploy artifacts to GCS
# if: github.event_name != 'pull_request'
# run: |
# cd ~/invest
# make deploy
run-model-tests:
name: Run model tests
runs-on: ${{ matrix.os }}
container: ${{ matrix.container }}
needs: check-syntax-errors
strategy:
fail-fast: false
matrix:
python-version: [3.9, "3.10", "3.11", "3.12", "3.13"]
os: [windows-latest, macos-13, ubuntu-latest]
include:
- os: ubuntu-latest
container: quay.io/pypa/manylinux_2_34_x86_64
shell: su - runner {0}
setup-conda-home: /home/runner
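# In the manylinux container, steps run as root by default; 'su - runner {0}'
# executes each step's script ({0}) as the 'runner' user in a login shell
# (assuming the container provides that user), and setup-conda-home points
# HOME at /home/runner to match.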
- os: ubuntu-latest
python-version: 3.9
platform-specific-dependencies: libxcrypt
- os: ubuntu-latest
python-version: "3.10"
platform-specific-dependencies: libxcrypt
- os: windows-latest
shell: bash -l {0}
setup-conda-home: $HOME
- os: macos-13
shell: bash -l {0}
setup-conda-home: $HOME
defaults:
run:
shell: ${{ matrix.shell }}
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0 # Fetch complete history for accurate versioning
# NOTE: It takes twice as long to save the sample data cache
# as it does to do a fresh clone (almost 5 minutes vs. 2.5 minutes).
# Test data, by contrast, is far faster to archive (on the order of
# a few seconds).
- name: Restore git-LFS test data cache
uses: actions/cache@v3
with:
path: data/invest-test-data
key: git-lfs-testdata-${{ hashFiles('Makefile') }}
- name: Set up python environment (Mac, Windows)
if: matrix.os != 'ubuntu-latest'
uses: ./.github/actions/setup_env
with:
requirements-files: requirements.txt requirements-dev.txt constraints_tests.txt
requirements: ${{ env.CONDA_DEFAULT_DEPENDENCIES }} python=${{ matrix.python-version }}
- name: Set up python environment (Ubuntu)
if: matrix.os == 'ubuntu-latest'
uses: ./.github/actions/setup_env
env:
HOME: ${{ matrix.setup-conda-home }}
PIP_ROOT_USER_ACTION: ignore
with:
python: /opt/python/cp313-cp313/bin/python
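# manylinux images ship prebuilt CPython interpreters under /opt/python/;
# cp313-cp313 is the CPython 3.13 build, used here to bootstrap the
# environment setup inside the container.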
requirements-files: requirements.txt requirements-dev.txt constraints_tests.txt
requirements: | # https://github.com/natcap/invest/issues/1797
${{ env.CONDA_DEFAULT_DEPENDENCIES }}
${{ matrix.platform-specific-dependencies }}
python=${{ matrix.python-version }}
- name: Download previous conda environment.yml
continue-on-error: true
# Using 'dawidd6' since 'download-artifact' GH action doesn't
# support downloading artifacts from prior workflow runs
uses: dawidd6/action-download-artifact@v6
with:
workflow: build-and-test.yml
# Get frozen conda env artifact from last successful workflow
workflow_conclusion: success
name: Conda Env for ${{ matrix.os }} ${{ matrix.python-version }}
path: ./conda-env-artifact
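# Build the manylinux image used for the wheel build below. dist/ is created
# on the host and made world-writable so the container user can write the
# built wheel into the bind mount.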
- name: Build docker image
if: matrix.os == 'ubuntu-latest'
run: |
echo ${{ github.workspace }}
docker build --progress plain -t manylinux -f .github/workflows/manylinux/Dockerfile .
mkdir ${{ github.workspace }}/dist
chmod +777 ${{ github.workspace }}/dist
- name: Build manylinux wheel
if: matrix.os == 'ubuntu-latest'
uses: addnab/docker-run-action@v3
with:
image: manylinux
options: -v ${{ github.workspace }}/dist:/home/runner/invest/dist:Z
run: |
echo "hello world"
ls
ls -ld dist
# install micromamba
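# micro.mamba.pm/install.sh is the official micromamba installer; sourcing
# ~/.bashrc afterwards picks up the shell initialization it adds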
"${SHELL}" <(curl -L micro.mamba.pm/install.sh)
source ~/.bashrc
micromamba --version
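# write the default conda dependencies one per line so they can be consumed
# as a requirements file by the conversion script below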
echo ${{ env.CONDA_DEFAULT_DEPENDENCIES }} python=${{ matrix.python-version }} | xargs -n 1 > extra_requirements.txt
/opt/python/cp313-cp313/bin/python ./scripts/convert-requirements-to-conda-yml.py \
extra_requirements.txt requirements.txt requirements-dev.txt constraints_tests.txt \
> environment.yml
cat environment.yml
micromamba env create -y -n env -f environment.yml
micromamba activate env
# Compare conda environments
micromamba list > conda-env.txt
# make sure that the exit code is always 0
# otherwise, an error appears in the action annotations
diff ./conda-env.txt ./conda-env-artifact/conda-env.txt || true
# Build the wheel
python -m build --wheel
ls -la dist
# Audit and repair wheel
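# auditwheel repair bundles any external shared libraries into the wheel and
# retags it for the manylinux_2_34_x86_64 platform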
cd ~/invest
WHEEL=$(find dist -name "natcap[._-]invest*.whl")
auditwheel show $WHEEL
auditwheel repair $WHEEL --plat manylinux_2_34_x86_64 -w dist
rm $WHEEL # remove the original wheel
- name: Compare conda environments
run: |
micromamba list > conda-env.txt
# make sure that the exit code is always 0
# otherwise, an error appears in the action annotations
diff ./conda-env.txt ./conda-env-artifact/conda-env.txt || true
# the working-directory key doesn't work as expected in the docker container,
# so we need to cd into the invest directory in the following steps
- name: Build wheel
if: matrix.os != 'ubuntu-latest'
run: |
# uninstall InVEST if it was already in the restored cache
cd ~/invest
python -m pip uninstall -y natcap.invest
python -m build --wheel
ls -la dist
- name: Install and run model tests
run: |
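# install the freshly built wheel so tests run against the packaged
# distribution rather than the source tree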
pip install $(find dist -name "natcap[._-]invest*.whl")
make test
- name: Upload wheel artifact
uses: actions/upload-artifact@v4
with:
name: Wheel for ${{ matrix.os }} ${{ matrix.python-version }}
path: dist
- name: Upload conda env artifact
uses: actions/upload-artifact@v4
continue-on-error: true
with:
name: Conda Env for ${{ matrix.os }} ${{ matrix.python-version }}
path: conda-env.txt
- name: Authenticate GCP
if: github.event_name != 'pull_request'
uses: google-github-actions/auth@v2
with:
credentials_json: ${{ secrets.GOOGLE_SERVICE_ACC_KEY }}
- name: Set up GCP
if: github.event_name != 'pull_request'
uses: google-github-actions/setup-gcloud@v2
- name: Deploy artifacts to GCS
if: github.event_name != 'pull_request'
run: make deploy
test-source-distribution:
name: Check sdist
runs-on: ${{ matrix.os }}
needs: check-syntax-errors
strategy:
fail-fast: false
matrix:
python-version: [3.9, "3.10", "3.11", "3.12", "3.13"]
os: [windows-latest, macos-13, ubuntu-latest]
include:
- os: ubuntu-latest
python-version: 3.9
platform-specific-dependencies: libxcrypt
- os: ubuntu-latest
python-version: "3.10"
platform-specific-dependencies: libxcrypt
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0 # Fetch complete history for accurate versioning
- uses: ./.github/actions/setup_env
with:
requirements-files: requirements.txt
requirements: |
${{ env.CONDA_DEFAULT_DEPENDENCIES }}
python=${{ matrix.python-version }}
twine ${{ matrix.platform-specific-dependencies }}
- name: Build source distribution
run: |
# Because we're using PEP518 build requirements, the user's
# computer is guaranteed to have cython available at build
# time. Thus, it is no longer necessary to distribute the
# .cpp files in addition to the .pyx files.
#
# Elevating any python warnings to errors to catch build issues ASAP.
python -W error -m build --sdist
- name: Install from source distribution
run: |
# Install natcap.invest from the sdist in dist/
pip install $(find dist -name "natcap[._-]invest*")
# Model tests already cover model functionality; here we just want to be
# sure that natcap.invest builds from source and that it imports.
python -c "from natcap.invest import *"
- name: Check long description with twine
run: twine check $(find dist -name "natcap[._-]invest*")
- uses: actions/upload-artifact@v4
with:
# NOTE: if you change this name, make sure the source distribution
# pattern defined in .github/workflows/release-part-2.yml will still
# grab it!
name: Source distribution ${{ matrix.os }} ${{ matrix.python-version }}
path: dist
# Secrets are not available in pull requests, so don't use GCP there.
# Only upload the sdist from one matrix case so we don't
# overwrite artifacts or upload duplicates (the mac and windows sdists
# have different extensions).
- name: Authenticate GCP
if: github.event_name != 'pull_request' && matrix.os == 'macos-13' && matrix.python-version == env.LATEST_SUPPORTED_PYTHON_VERSION
uses: google-github-actions/auth@v2
with:
credentials_json: ${{ secrets.GOOGLE_SERVICE_ACC_KEY }}
- name: Set up GCP
if: github.event_name != 'pull_request' && matrix.os == 'macos-13' && matrix.python-version == env.LATEST_SUPPORTED_PYTHON_VERSION
uses: google-github-actions/setup-gcloud@v2
- name: Deploy artifacts to GCS
if: github.event_name != 'pull_request' && matrix.os == 'macos-13' && matrix.python-version == env.LATEST_SUPPORTED_PYTHON_VERSION
run: make deploy
validate-resources:
name: Validate Sampledata & User Guide
runs-on: windows-latest
needs: check-syntax-errors
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0 # Fetch complete history for accurate versioning
- uses: ./.github/actions/setup_env
with:
requirements-files: requirements.txt
requirements: |
${{ env.CONDA_DEFAULT_DEPENDENCIES }}
python=${{ env.LATEST_SUPPORTED_PYTHON_VERSION }}
pytest
- name: Make install
run: make install
- name: Validate sample data
run: make validate_sampledata
- name: Validate user guide links
run: make validate_userguide_filenames
run-workbench-tests:
name: Run Workbench Tests
runs-on: ${{ matrix.os }}
needs: check-syntax-errors
strategy:
fail-fast: false
max-parallel: 4
matrix:
os: [windows-latest, macos-13]
steps:
- name: Check out repo
uses: actions/checkout@v4
with:
fetch-depth: 0 # Fetch complete history for accurate versioning
- name: Set up python environment
uses: ./.github/actions/setup_env
with:
requirements-files: requirements.txt requirements-dev.txt
requirements: |
${{ env.CONDA_DEFAULT_DEPENDENCIES }}
python=${{ env.LATEST_SUPPORTED_PYTHON_VERSION }}
- name: Make install
run: make install
- name: Set up node
uses: actions/setup-node@v3
with:
node-version: 20
- name: Restore node_modules cache
id: nodemodules-cache
uses: actions/cache@v3
with:
path: workbench/node_modules
key: ${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('workbench/yarn.lock') }}
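# key on OS, architecture, and the yarn.lock hash so the node_modules cache
# is invalidated whenever dependencies change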
- name: Install workbench dependencies
if: steps.nodemodules-cache.outputs.cache-hit != 'true'
working-directory: workbench
run: |
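# raise yarn's network timeout to 10 minutes (600000 ms) to reduce flaky
# registry timeouts on CI runners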
yarn config set network-timeout 600000 -g
yarn install
- name: Run workbench tests
working-directory: workbench
env:
CI: true
run: yarn test
build-binaries:
name: Build binaries
needs: check-syntax-errors
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [macos-13, windows-latest]
include:
- os: macos-13
puppeteer-log: ~/Library/Logs/invest-workbench/
workspace-path: InVEST-failed-mac-workspace.tar
binary-extension: dmg
- os: windows-latest
puppeteer-log: ~/AppData/Roaming/invest-workbench/logs/
workspace-path: ${{ github.workspace }}
binary-extension: exe
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0 # Fetch complete history for accurate versioning
- name: Set up conda environment
uses: ./.github/actions/setup_env
with:
requirements-files: |
requirements.txt
requirements-dev.txt
requirements-docs.txt
constraints_tests.txt
requirements: |
${{ env.CONDA_DEFAULT_DEPENDENCIES }}
python=${{ env.LATEST_SUPPORTED_PYTHON_VERSION }}
pandoc
- name: Make install
run: make install
# Not caching chocolatey packages because the cache may not be reliable
# https://github.com/chocolatey/choco/issues/2134
# and this step only takes about 30 seconds.
- name: Install build dependencies (Windows)
if: matrix.os == 'windows-latest'
shell: powershell
run: |
choco install zip unzip
$env:PATH += ";C:\ProgramData\chocolatey\bin"
Import-Module $env:ChocolateyInstall\helpers\chocolateyProfile.psm1
refreshenv # Choco-provided command to reload environment variables
- name: Build userguide
run: make userguide
- name: Build binaries
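# CONDA is overridden with $MAMBA_EXE (the micromamba executable, presumably
# exported by the micromamba-based environment setup) so the Makefile uses
# micromamba for its packaging environment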
run: make CONDA="$MAMBA_EXE" binaries
- name: Install Node.js
uses: actions/setup-node@v3
with:
node-version: 18
- name: Restore node_modules cache
id: nodemodules-cache
uses: actions/cache@v3
with:
path: workbench/node_modules
key: ${{ runner.os }}-${{ runner.arch }}-${{ hashFiles('workbench/yarn.lock') }}
- name: Install Workbench Dependencies
if: steps.nodemodules-cache.outputs.cache-hit != 'true'
working-directory: workbench
run: |
yarn config set network-timeout 600000 -g
yarn install
- name: Authenticate GCP
if: github.event_name != 'pull_request'
uses: google-github-actions/auth@v2
with:
credentials_json: ${{ secrets.GOOGLE_SERVICE_ACC_KEY }}
- name: Set up GCP
if: github.event_name != 'pull_request'
uses: google-github-actions/setup-gcloud@v2
- name: Build Workbench
working-directory: workbench
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
DEBUG: electron-builder
CSC_IDENTITY_AUTO_DISCOVERY: false # disable electron-builder code signing
run: |
yarn run build
yarn run dist
- name: Test electron app with puppeteer
working-directory: workbench
run: npx cross-env CI=true yarn run test-electron-app
- name: Deploy artifacts to GCS
if: github.event_name != 'pull_request'
run: make deploy
# This relies on the file existing on GCP, so it must be run after `make
# deploy` is called.
- name: Queue binaries for signing
if: github.event_name != 'pull_request'
env:
ACCESS_TOKEN: ${{ secrets.CODESIGN_QUEUE_ACCESS_TOKEN }}
run: |
make codesign
- name: Upload workbench binary artifact
if: always()
uses: actions/upload-artifact@v4
with:
name: Workbench-${{ runner.os }}-binary
path: workbench/dist/*.${{ matrix.binary-extension }}
- name: Upload user's guide artifact (Windows)
if: matrix.os == 'windows-latest'
uses: actions/upload-artifact@v4
with:
name: InVEST-user-guide
path: dist/InVEST_*_userguide.zip
- name: Upload workbench logging from puppeteer
uses: actions/upload-artifact@v4
if: always()
with:
name: ${{ runner.os }}_puppeteer_log.zip
path: ${{ matrix.puppeteer-log }}
- name: Run invest-autotest with binaries
if: |
(github.event_name == 'push' &&
(startsWith(github.ref, 'refs/heads/release') || github.ref == 'refs/heads/main')) ||
(github.event_name == 'pull_request' && startsWith(github.head_ref, 'autorelease'))
run: make invest_autotest
- name: Tar the workspace to preserve permissions (macOS)
if: failure() && matrix.os == 'macos-13'
run: tar -cvf ${{ matrix.workspace-path }} ${{ github.workspace }}
- name: Upload workspace on failure
if: ${{ failure() }}
uses: actions/upload-artifact@v4
with:
name: InVEST-failed-${{ runner.os }}-workspace
path: ${{ matrix.workspace-path }}
build-sampledata:
name: Build sampledata archives
needs: check-syntax-errors
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0 # Fetch complete history for accurate versioning
- uses: actions/setup-python@v5
with:
python-version: ${{ env.LATEST_SUPPORTED_PYTHON_VERSION }}
- name: Install dependencies
# dependencies of setup.py, needed to get the version string
run: pip install babel cython numpy setuptools setuptools_scm wheel
- run: make sampledata sampledata_single
- name: Upload sample data artifact
uses: actions/upload-artifact@v4
with:
name: InVEST-sample-data
path: dist/*.zip
- name: Authenticate GCP
if: github.event_name != 'pull_request'
uses: google-github-actions/auth@v2
with:
credentials_json: ${{ secrets.GOOGLE_SERVICE_ACC_KEY }}
- name: Set up GCP
if: github.event_name != 'pull_request'
uses: google-github-actions/setup-gcloud@v2
- name: Deploy artifacts to GCS
if: github.event_name != 'pull_request'
run: make deploy