Merge pull request #888 from BCDA-APS/887-apsu-subnet-check #601
Workflow file for this run

name: Unit Tests & Code Coverage

on:
  # Triggers the workflow on push or pull request events, but only for the main branch
  push:
    branches:
      - main
  pull_request:
    branches:
      - main
  workflow_dispatch: # allow manual triggering

defaults:
  run:
    shell: bash -l {0}
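# "bash -l {0}" runs every step in a login shell, so environments activated
# by micromamba/conda below are available to the `run:` commands.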
jobs:
  lint:
    name: Code style
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: '3.11'
      - name: Install Dependencies
        run: |
          python -m pip install --upgrade pip
          pip install flake8
      - name: Run flake8
        run: |
          flake8
      - name: Run black
        uses: rickstaa/action-black@v1
        with:
          black_args: ". --check"
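      # "--check" makes black report formatting problems without rewriting
      # files; the equivalent local command (assuming black is installed) is:
      #   black . --check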
  install-catalogs:
    name: Install & cache databroker catalogs
    runs-on: ubuntu-latest
    needs: lint
    strategy:
      matrix:
        python-version:
          - "3.11"
      max-parallel: 5
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v4
        with:
          python-version: '3.11'
      - name: Create Python ${{ matrix.python-version }} environment
        uses: mamba-org/setup-micromamba@v1
        with:
          cache-environment: true
          cache-environment-key: env-key-${{ matrix.python-version }}
          condarc: |
            channel-priority: flexible
          environment-file: environment.yml
          environment-name: anaconda-test-env-py-${{ matrix.python-version }}
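      # cache-environment re-uses the solved micromamba environment between
      # runs, keyed per Python version, so repeat runs skip the package solve.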
      - name: Unpack
        run: |
          which databroker-pack
          which databroker-unpack
          cd resources
          bash ./unpack.sh
          cd ..
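      # unpack.sh uses databroker-unpack to install the packed test catalogs;
      # judging from the listings below, they land under ~/.local/share/intake
      # and /tmp/*_test/.  A hypothetical direct invocation would look like:
      #   databroker-unpack inplace <packed_dir> <catalog_name>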
      - name: Directory Listings
        run: |
          ls -lAFghR ~/.local/
          ls -lAFghR /tmp/*_test/
      - name: Prepare archival content
        run: |
          mkdir -p ~/databroker_catalogs/
          mv ~/.local ~/databroker_catalogs/
          mv /tmp/*_test ~/databroker_catalogs/
      - name: Archive catalog artifacts
        uses: actions/upload-artifact@v3
        with:
          name: databroker_catalogs
          path: ~/databroker_catalogs
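  # The test-matrix job below downloads this artifact and restores the
  # catalogs, so they are unpacked once and shared across the Python matrix.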
  test-matrix:
    name: Python ${{ matrix.python-version }}
    runs-on: ubuntu-latest
    needs: install-catalogs
    strategy:
      matrix:
        python-version:
          - "3.8"
          - "3.9"
          - "3.10"
          - "3.11"
      max-parallel: 5
    steps:
      - uses: actions/checkout@v3
      - name: Create Python ${{ matrix.python-version }} environment
        uses: mamba-org/setup-micromamba@v1
        with:
          cache-environment: true
          cache-environment-key: env-key-${{ matrix.python-version }}
          condarc: |
            channel-priority: flexible
          environment-file: environment.yml
          environment-name: anaconda-test-env-py-${{ matrix.python-version }}
          create-args: >-
            coveralls
            pytest
            pytest-cov
            python=${{ matrix.python-version }}
            setuptools-scm
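      # create-args adds the test-only dependencies (and pins the matrix
      # Python version) on top of the packages listed in environment.yml.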
      - name: Initial diagnostics
        run: |
          micromamba info
          micromamba list
          conda config --show-sources
          conda config --show
          micromamba env list
          printenv | sort
      - name: Directories before Docker
        run: ls -lAFghrt ~/
      - name: Start EPICS IOCs in Docker
        run: |
          bash ./.github/scripts/iocmgr.sh start GP gp
          bash ./.github/scripts/iocmgr.sh start ADSIM ad
          docker ps -a
          ls -lAFgh /tmp/docker_ioc/iocad/
          ls -lAFgh /tmp/docker_ioc/iocgp/
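      # iocmgr.sh starts two EPICS IOC containers: a general-purpose (GP) IOC
      # with PV prefix "gp:" and an area detector simulator (ADSIM) IOC with
      # prefix "ad:"; the connectivity checks below address PVs under those
      # prefixes.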
      - name: Directories after Docker
        run: ls -lAFghrt ~/
      - name: Confirm EPICS IOC is available via caget
        shell: bash -l {0}
        run: |
          docker exec iocgp grep float1 /home/iocgp/dbl-all.txt
          docker exec iocgp /opt/base/bin/linux-x86_64/caget gp:UPTIME gp:gp:float1
          docker exec iocad /opt/base/bin/linux-x86_64/caget ad:cam1:Acquire_RBV
          which caget
          caget gp:UPTIME
          caget gp:gp:float1
          caget ad:cam1:Acquire_RBV
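      # The PVs are read twice: with caget inside each container and again
      # from the runner host, confirming Channel Access works across the
      # container boundary before any tests run.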
      - name: Confirm EPICS IOC is available via PyEpics
        shell: bash -l {0}
        run: |
          python -c "import epics; print(epics.caget('gp:UPTIME'))"
      - name: Confirm EPICS IOC is available via ophyd
        shell: bash -l {0}
        run: |
          CMD="import ophyd"
          CMD+="; up = ophyd.EpicsSignalRO('gp:UPTIME', name='up')"
          CMD+="; pv = ophyd.EpicsSignalRO('gp:gp:float1', name='pv')"
          CMD+="; up.wait_for_connection()"
          CMD+="; print(up.get(), pv.get())"
          python -c "${CMD}"
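      # wait_for_connection() raises TimeoutError if the PV never connects,
      # so this step fails fast when the IOCs are unreachable instead of
      # letting the test suite hang later.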
      - name: Download catalog artifacts
        uses: actions/download-artifact@v3
        with:
          name: databroker_catalogs
          path: ~/databroker_catalogs
      - name: Restore archival content
        run: |
          mkdir -p ~/.local/share/intake
          mv ~/databroker_catalogs/.local/share/intake/* ~/.local/share/intake
          mv ~/databroker_catalogs/*_test /tmp/
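      # This reverses the "Prepare archival content" step of install-catalogs,
      # putting the intake catalog files and the /tmp/*_test data back where
      # databroker expects to find them.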
      - name: Diagnostics
        shell: bash -l {0}
        run: |
          df -HT
          micromamba list
      - name: Test catalog length, expect 53
        shell: bash -l {0}
        run: python -c "import databroker; print(len(databroker.catalog['apstools_test']))"
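      # len() of a databroker catalog is its number of runs; 53 is the run
      # count packed into the apstools_test catalog, so any other value means
      # the restore above failed.  A hypothetical spot-check of a single run:
      #   python -c "import databroker; print(databroker.catalog['apstools_test'][-1].metadata['start'])"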
      - name: Run tests with pytest & coverage
        shell: bash -l {0}
        run: |
          coverage run --concurrency=thread --parallel-mode -m pytest -vvv --exitfirst .
          coverage combine
          coverage report --precision 3
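      # --parallel-mode writes measurement data to a unique .coverage.* file
      # per process; "coverage combine" then merges those files into a single
      # .coverage database before the report is printed.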
      - name: Upload coverage data to coveralls.io
        shell: bash -l {0}
        run: |
          micromamba list coveralls
          which coveralls
          coveralls debug
          coveralls --service=github
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          COVERALLS_FLAG_NAME: ${{ matrix.python-version }}
          COVERALLS_PARALLEL: true
          # https://coveralls-python.readthedocs.io/en/latest/usage/configuration.html#github-actions-support
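  # With COVERALLS_PARALLEL=true, each matrix job uploads a partial result
  # tagged by COVERALLS_FLAG_NAME; Coveralls holds them until the job below
  # sends the finish signal.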
  coveralls:
    name: Report unit test coverage to coveralls
    needs: test-matrix
    runs-on: ubuntu-latest
    container: python:3-slim
    steps:
      - name: Gather coverage and report to Coveralls
        run: |
          echo "Finally!"
          pip3 install --upgrade coveralls
          # debug mode: output prepared json and reported files list to stdout
          # https://coveralls-python.readthedocs.io/en/latest/troubleshooting.html
          coveralls debug
          coveralls --service=github --finish
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
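      # "coveralls --finish" closes the parallel build, telling Coveralls to
      # aggregate the per-version uploads into one combined coverage report.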