WIP: fix #3389 #1075

Workflow file for this run

# name: Qiita CI
on:
  push:
    branches: [ dev ]
  pull_request:
jobs:
  # derived from https://github.com/actions/example-services/blob/master/.github/workflows/postgres-service.yml
  main:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - cover_package: "qiita_db"
          - cover_package: "qiita_pet qiita_core qiita_ware"
    services:
      postgres:
        # Docker Hub image
        image: postgres:13.4
        env:
          POSTGRES_DB: postgres
          POSTGRES_USER: postgres
          POSTGRES_PASSWORD: postgres
          COVER_PACKAGE: ${{ matrix.cover_package }}
        # Set health checks to wait until postgres has started
        options: >-
          --health-cmd pg_isready
          --health-interval 10s
          --health-timeout 5s
          --health-retries 5
        ports:
          # based on https://github.com/actions/example-services/blob/master/.github/workflows/postgres-service.yml#L44-L72
          - 5432/tcp
    steps:
      # Downloads a copy of the code in your repository before running CI tests
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: Setup for conda
        uses: conda-incubator/setup-miniconda@v2
        with:
          auto-update-conda: true
          python-version: '3.9'
      - name: Basic dependencies install
        env:
          COVER_PACKAGE: ${{ matrix.cover_package }}
        shell: bash -l {0}
        run: |
          echo "Testing: " $COVER_PACKAGE
          # pull out the port so we can modify the configuration file easily
          pgport=${{ job.services.postgres.ports[5432] }}
          sed -i "s/PORT = 5432/PORT = $pgport/" qiita_core/support_files/config_test.cfg
          # PGPASSWORD is read by pg_restore, which is called by the build_db process.
          export PGPASSWORD=postgres
          # Setting up main qiita conda environment
          conda config --add channels conda-forge
          conda deactivate
          conda create --quiet --yes -n qiita python=3.9 pip libgfortran numpy nginx cython redis
          conda env list
          conda activate qiita
          pip install -U pip
          pip install sphinx sphinx-bootstrap-theme nose-timer Click coverage
          # Configuring SSH: append the needed options to a copy of the default
          # sshd_config, then move it back into place
          cp /etc/ssh/sshd_config sshd_config
          echo "RSAAuthentication yes" >> sshd_config
          echo "PubkeyAuthentication yes" >> sshd_config
          echo "StrictModes no" >> sshd_config
          sudo mv sshd_config /etc/ssh/sshd_config
          sudo systemctl restart ssh
      - name: Webdis install
        shell: bash -l {0}
        run: |
          sudo apt-get -y install libevent-dev
          git clone https://github.com/nicolasff/webdis
          cd webdis
          make
      - name: Main install
        shell: bash -l {0}
        run: |
          conda activate qiita
          export QIITA_ROOTCA_CERT=`pwd`/qiita_core/support_files/ci_rootca.crt
          export QIITA_CONFIG_FP=`pwd`/qiita_core/support_files/config_test.cfg
          export REDBIOM_HOST="http://localhost:7379"
          pip install . --no-binary redbiom
          # 10.2022
          # this is for redbiom / biom-format (so fine to delete in the future)
          pip install future
          pwd
          mkdir ~/.qiita_plugins
      - name: Install plugins
        shell: bash -l {0}
        run: |
          conda env create -n qtp-biom --file https://data.qiime2.org/distro/amplicon/qiime2-amplicon-2024.5-py39-linux-conda.yml
          export QIITA_ROOTCA_CERT=`pwd`/qiita_core/support_files/ci_rootca.crt
          export QIITA_CONFIG_FP=`pwd`/qiita_core/support_files/config_test.cfg
          export REDBIOM_HOST="http://localhost:7379"
          conda activate qtp-biom
          pip install -U pip
          pip install https://github.com/qiita-spots/qiita_client/archive/master.zip
          pip install https://github.com/qiita-spots/qtp-biom/archive/master.zip
          # if QIITA_ROOTCA_CERT is appended to certifi's cacert.pem file,
          # then --server-cert does not need to be specified to any plugin,
          # including configure_biom.
          # echo `python -c "import certifi;print(certifi.where())"`
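          # a sketch of that alternative, kept commented out (the --server-cert route
          # below is what actually runs):
          # cat $QIITA_ROOTCA_CERT >> `python -c "import certifi;print(certifi.where())"`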
          configure_biom --env-script "source /home/runner/.profile; conda activate qtp-biom" --server-cert $QIITA_ROOTCA_CERT
      - name: Starting services
        shell: bash -l {0}
        run: |
          conda activate qiita
          export QIITA_ROOTCA_CERT=`pwd`/qiita_core/support_files/ci_rootca.crt
          export QIITA_CONFIG_FP=`pwd`/qiita_core/support_files/config_test.cfg
          export REDBIOM_HOST="http://localhost:7379"
          echo "1. Setting up redis"
          redis-server --daemonize yes --port 7777
          redis-server --daemonize yes --port 6379
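          # note: two redis instances are started; 7777 is presumably the port Qiita's
          # config_test.cfg points at, while 6379 is the default port webdis connects to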
echo "2. Starting webdis"
pushd webdis
./webdis &
popd
echo "3. Starting redbiom: " $REDBIOM_HOST
          curl -s http://localhost:7379/FLUSHALL > /dev/null
          redbiom --version
          redbiom admin scripts-writable
          redbiom admin create-context --name "qiita-test" --description "qiita-test context"
          redbiom admin load-sample-metadata --metadata `pwd`/qiita_db/support_files/test_data/templates/1_19700101-000000.txt
          redbiom admin load-sample-metadata-search --metadata `pwd`/qiita_db/support_files/test_data/templates/1_19700101-000000.txt
          redbiom admin load-sample-data --table `pwd`/qiita_db/support_files/test_data/processed_data/1_study_1001_closed_reference_otu_table.biom --context qiita-test --tag 4
          redbiom admin load-sample-data --table `pwd`/qiita_db/support_files/test_data/processed_data/1_study_1001_closed_reference_otu_table-for_redbiom_tests.biom --context qiita-test --tag 5
          echo "4. Setting up nginx"
          mkdir -p /usr/share/miniconda/envs/qiita/var/run/nginx/
          nginx -c ${PWD}/qiita_pet/nginx_example.conf
          echo "5. Setting up qiita"
          conda activate qiita
          qiita-env make --no-load-ontologies
          qiita-test-install
          qiita plugins update
          echo "6. Starting supervisord => multiple qiita instances"
          supervisord -c ${PWD}/qiita_pet/supervisor_example.conf
          sleep 10
          cat /tmp/supervisord.log
          echo "7. Starting plugins"
          conda deactivate
          conda activate qtp-biom
          export QIITA_CLIENT_DEBUG_LEVEL=DEBUG
          start_biom https://localhost:8383 register ignored
          conda deactivate
          echo "8. Setting up SSH"
          ssh-keygen -t rsa -b 4096 -N '' -f $PWD/qiita_ware/test/test_data/test_key
          mkdir ~/.ssh/
          cp $PWD/qiita_ware/test/test_data/test_key* ~/.ssh/
          cat ~/.ssh/test_key.pub > ~/.ssh/authorized_keys
          chmod 600 $PWD/qiita_ware/test/test_data/test_key*
          chmod 600 ~/.ssh/*
          chmod 700 ~/.ssh/
          echo "Connecting as $USER@localhost"
          # this line (and the -o StrictHostKeyChecking=no) is run so the server
          # is added to the list of known hosts
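          # (-O tells scp to use the original SCP protocol rather than SFTP)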
          scp -O -o StrictHostKeyChecking=no -i $PWD/qiita_ware/test/test_data/test_key $USER@localhost:/home/runner/work/qiita/qiita/qiita_ware/test/test_data/random_key /home/runner/work/qiita/qiita/qiita_ware/test/test_data/random_key_copy_1
      - name: Main tests
        shell: bash -l {0}
        env:
          COVER_PACKAGE: ${{ matrix.cover_package }}
        run: |
          conda activate qiita
          export QIITA_ROOTCA_CERT=`pwd`/qiita_core/support_files/ci_rootca.crt
          export QIITA_CONFIG_FP=`pwd`/qiita_core/support_files/config_test.cfg
          # for testing we only need to have this set, not actually exist
          export QIITA_JOB_SCHEDULER_EPILOGUE=/path/to/epilogue/file
          export REDBIOM_HOST="http://localhost:7379"
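          # note: --cover-package=${COVER_PACKAGE// / --cover-package=} expands the
          # space-separated package list into one --cover-package flag per package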
          nosetests $COVER_PACKAGE --with-doctest --with-coverage --with-timer -v --cover-package=${COVER_PACKAGE// / --cover-package=} -e 'test_submit_EBI_parse_EBI_reply_failure' -e 'test_full_submission'
          # killing the qiita server to run the next commands
          QIITA_PID=`cat /tmp/supervisord.pid`
          kill $QIITA_PID
          sleep 10
          # due to qiita_db tests being more complex and taking longer than
          # the other tests we will only add some extra tests to the run that is
          # not testing qiita_db
          if [[ "$COVER_PACKAGE" != *"qiita_db"* ]]; then
            # 1. testing that we can add some "dummy" studies to the db via
            # CLI
            test_data_studies/commands.sh;
            # 2. making sure that all qiita cron jobs complete as expected
            all-qiita-cron-job;
            # 3. making sure that a production system has the expected rows
            # in all our tables; steps: a. drop test db, b. change $QIITA_CONFIG_FP,
            # c. create a new production system, d. count rows in the db.
            qiita-env drop;
            cp $QIITA_CONFIG_FP ${QIITA_CONFIG_FP}.bk
            sed 's/TEST_ENVIRONMENT = TRUE/TEST_ENVIRONMENT = FALSE/g' ${QIITA_CONFIG_FP}.bk > $QIITA_CONFIG_FP;
            qiita-env make --no-load-ontologies;
            export PGPASSWORD=postgres
            pgport=${{ job.services.postgres.ports[5432] }}
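            # reltuples is the planner's per-table row estimate; a freshly created
            # production schema is expected to sum to 0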
            row_counts=`psql -h localhost -U postgres -d qiita_test -p $pgport -c "SELECT SUM(c.reltuples) FROM pg_class c JOIN pg_namespace n on n.oid = c.relnamespace WHERE n.nspname = 'qiita' AND c.relkind = 'r' AND n.nspname NOT IN ('information_schema', 'pg_catalog');"`
            if [[ `echo $row_counts` != *" 0 "* ]]; then
              echo "***********";
              echo "The number of rows in a production system is not what's expected:";
              echo $row_counts;
              echo "***********";
              exit 1
            fi
          fi
      - name: Submit coveralls
        uses: AndreMiras/coveralls-python-action@develop
        with:
          github-token: ${{ secrets.github_token }}
          flag-name: "${{ matrix.cover_package }}"
          parallel: true
  coveralls_finish:
    needs: main
    runs-on: ubuntu-latest
    steps:
      - name: Coveralls Finished
        uses: AndreMiras/coveralls-python-action@develop
        with:
          github-token: ${{ secrets.github_token }}
          parallel-finished: true
  lint:
    runs-on: ubuntu-latest
    steps:
      - name: flake8
        uses: actions/setup-python@v2
        with:
          python-version: '3.9'
      - name: install dependencies
        run: python -m pip install --upgrade pip
      - name: Check out repository code
        uses: actions/checkout@v2
      - name: lint
        run: |
          pip install -q flake8
          flake8 qiita_* setup.py scripts/qiita* notebooks/*/*.py