This commit is contained in:
sommersoft 2022-08-14 03:09:07 +02:00 committed by GitHub
commit 1a536da960
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
13 changed files with 1163 additions and 1 deletions

110
.github/workflows/downstream_testing.yml vendored Normal file
View File

@ -0,0 +1,110 @@
name: downstream_testing_2
on:
workflow_dispatch:
pull_request:
branches:
# Only run downstream tests on PRs for releases
- "[0-9]+.[0-9]+.x"
push:
branches:
- downstream_testing_2
jobs:
downstream-ubuntu:
runs-on: ubuntu-latest
timeout-minutes: 60
strategy:
fail-fast: false
matrix:
include:
- name: "pytest-django"
repo: "pytest-dev/pytest-django"
docker_profile: "postgres"
jobs: "test"
workflow_name: "main.yml"
matrix_exclude: |
linting,docs py39-dj40-mysql_innodb-coverage py38-dj32-sqlite-xdist-coverage
py38-dj40-sqlite-xdist-coverage py39-djmain-sqlite-coverage py36-dj32-mysql_myisam-coverage
- name: "pytest-django"
repo: "pytest-dev/pytest-django"
docker_profile: "mysql"
jobs: "test"
workflow_name: "main.yml"
matrix_exclude: |
linting,docs py310-dj40-postgres-xdist-coverage py310-dj32-postgres-xdist-coverage
py39-dj32-postgres-xdist-coverage py38-dj32-sqlite-xdist-coverage
py38-dj40-sqlite-xdist-coverage py39-djmain-sqlite-coverage pypy3-dj32-postgres
py36-dj32-mysql_myisam-coverage
- name: "pytest-django"
repo: "pytest-dev/pytest-django"
docker_profile: "nodb"
jobs: "test"
workflow_name: "main.yml"
matrix_exclude: |
linting,docs py310-dj40-postgres-xdist-coverage py310-dj32-postgres-xdist-coverage
py39-dj32-postgres-xdist-coverage py39-dj40-mysql_innodb-coverage
py36-dj32-mysql_myisam-coverage pypy3-dj32-postgres
- name: "pytest-html"
repo: "pytest-dev/pytest-html"
docker_profile: "nodb"
jobs: "test_python"
workflow_name: "tests.yml"
matrix_exclude: py36-ubuntu
- name: "pytest-order"
repo: "pytest-dev/pytest-order"
docker_profile: "nodb"
jobs: "test"
workflow_name: "pythontests.yml"
matrix_exclude: 3.6
- name: "pytest-mock"
repo: "pytest-dev/pytest-mock"
docker_profile: "nodb"
jobs: "test"
workflow_name: "test.yml"
matrix_exclude: ""
- name: "pytest-cov"
repo: "pytest-dev/pytest-cov"
docker_profile: "nodb"
jobs: "test"
workflow_name: "test.yml"
          # Exclude pypy3 for now; still working on a suitable regex substitution
matrix_exclude: |
3.6 pypy-3.6 pypy-3.7
- name: "pytest-bdd"
repo: "pytest-dev/pytest-bdd"
docker_profile: "nodb"
jobs: "build"
workflow_name: "main.yml"
matrix_exclude: ""
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Checkout ${{ matrix.name }}
uses: actions/checkout@v2
with:
fetch-depth: 0
repository: ${{ matrix.repo }}
path: ${{ matrix.name }}
- name: Run Downstream Tests - ${{ matrix.name }}
run: docker-compose -f ./testing/downstream_testing/docker-compose.yml --profile ${{ matrix.docker_profile }} up --exit-code-from base_${{ matrix.docker_profile }}
env:
DS_NAME: ${{ matrix.name }}
DS_YAML: ./${{ matrix.name }}/.github/workflows/${{ matrix.workflow_name }}
DS_JOBS: ${{ matrix.jobs }}
DS_MATRIX_EXCLUDE: ${{ matrix.matrix_exclude }}
DOCKER_BUILDKIT: 1
COV_CMD: ""
- name: Test Results - ${{ matrix.name }}
if: ${{ always() }}
run: |
docker logs base -t &>docker.log
sort -b -k 1 docker.log

View File

@ -68,6 +68,7 @@ repos:
- packaging - packaging
- tomli - tomli
- types-pkg_resources - types-pkg_resources
- types-PyYAML
- repo: local - repo: local
hooks: hooks:
- id: rst - id: rst

View File

@ -0,0 +1,2 @@
Added a system to run downstream plugin tests against current pytest, using GitHub Actions. Currently
constrained to pytest plugins that utilize GitHub Actions and ``tox`` to accomplish their own tests.

View File

@ -59,6 +59,8 @@ markers = [
"slow", "slow",
# experimental mark for all tests using pexpect # experimental mark for all tests using pexpect
"uses_pexpect", "uses_pexpect",
# runs tests for ./testing/downstream_testing/downstream_runner.py
"downstream",
] ]

View File

@ -64,6 +64,7 @@ console_scripts =
[options.extras_require] [options.extras_require]
testing = testing =
PyYAML
argcomplete argcomplete
hypothesis>=3.56 hypothesis>=3.56
mock mock

View File

@ -0,0 +1,27 @@
# Base image: Ubuntu with multiple Python versions pre-built via pyenv, which
# avoids recompiling interpreters on every CI run (see downstream docs).
FROM sommersoft/pyenv-pytest:latest
# Entrypoint installs runner requirements and launches downstream_runner.
COPY entrypoint.sh /entrypoint.sh
# Add mysql apt repository
# NOTE(review): the sks-keyservers.net pools were decommissioned -- the retry
# loop below falls through to keyserver.ubuntu.com / pgp.mit.edu, but consider
# dropping the dead hosts.
RUN set -ex; \
# gpg: key 5072E1F5: public key "MySQL Release Engineering <mysql-build@oss.oracle.com>" imported
key='859BE8D7C586F538430B19C2467B942D3A79BD29'; \
export GNUPGHOME="$(mktemp -d)"; \
#gpg --batch --keyserver ha.pool.sks-keyservers.net --recv-keys "$key"; \
for server in ha.pool.sks-keyservers.net \
hkp://p80.pool.sks-keyservers.net:80 \
keyserver.ubuntu.com \
hkp://keyserver.ubuntu.com:80 \
pgp.mit.edu; do \
gpg --keyserver "$server" --recv-keys "$key" && break || echo "Trying new keyserver..."; \
done; \
gpg --batch --export "$key" > /etc/apt/trusted.gpg.d/mysql.gpg; \
gpgconf --kill all; \
rm -rf "$GNUPGHOME"; \
apt-key list > /dev/null
# Pin the MySQL 8.0 apt repository (Debian buster base).
RUN echo 'deb http://repo.mysql.com/apt/debian/ buster mysql-8.0' > /etc/apt/sources.list.d/mysql.list
# Client tools only; the database servers run as sibling compose services.
RUN apt-get update \
&& apt-get install -y postgresql-client mysql-client
ENTRYPOINT [ "/entrypoint.sh" ]

View File

@ -0,0 +1,222 @@
Automated Downstream Testing
++++++++++++++++++++++++++++
Testing pytest Plugins With Changes to pytest
=============================================
Overview
--------
- Utilizes Docker (compose) inside a GitHub Actions workflow to establish the necessary environment(s) to run tests.
- The base image is currently Ubuntu only, and uses ``pyenv`` to have the necessary versions of Python installed.
- The base image is on Docker Hub, which greatly speeds up each run by avoiding ``pyenv``'s compilation process each time.
- The completed image contains a PostgreSQL & mySQL layer (needed by ``pytest-django``, at minimum). Additional layers can be added if necessary.
- Utilizes each plugin's current test workflow to formulate the tests to run.
- The plugin's GitHub Actions workflow is used to get ``tox`` environment settings.
- The plugin's ``tox`` config is adjusted to ensure the local ``pytest`` is used.
- Uses a static command set: ``tox -e {toxenv}``.
How To Add Additional Plugin(s)
-------------------------------
**pytest/testing/downstream_testing/action_schemas.json:**
**********************************************************
``action_schemas.json`` outlines both GitHub Actions workflow fields to get information, and how
that information should be manipulated to provide usable tox environments to run a plugin's tests.
Example ``action_schemas.json`` entry:
.. code:: JSON
"pytest-django": {
"matrix": [
"matrix",
"include"
],
"tox_cmd_build": {
"base": "name",
"prefix": "py",
"sub": {
"pattern": "-coverage$",
"replace": ""
}
}
}
**To add a new plugin, complete the following:**
1. Start by adding a new JSON object, with the name of the repo. ``"pytest-django": {}`` in the example above.
2. Add an array named ``matrix``. Array items should point to the strategy matrix from the repo's GitHub Actions
YAML file. For instance, the ``pytest-django`` example above has a strategy matrix defined as follows
.. code:: YAML
# pytest-django/.github/workflows/main.yml
jobs:
tests:
strategy:
fail-fast: false
matrix:
include:
- name: linting,docs
python: 3.8
allow_failure: false
- name: py310-dj40-postgres-xdist-coverage
python: '3.10'
allow_failure: false
This makes ``["matrix", "include"]`` our target to parse the ``pytest-django`` strategy. This is a "combination"
strategy based on the use of ``include``.
For non-combination strategies, use the matrix field that points to the appropriate choices. Using
``pytest-order`` as a non-combination example:
.. code:: YAML
# pytest-order/.github/workflows/pythontests.yml
jobs:
test:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest]
python-version: [3.6, 3.7, 3.8, 3.9, "3.10", pypy3]
The corresponding entry in ``actions_schema.json`` points to ``["matrix", "python-version"]``:
.. code:: JSON
"pytest-order": {
"matrix": [
"matrix",
"python-version"
],
3. Add a JSON object named ``tox_cmd_build``, with three items: ``base``, ``prefix``, and ``sub``.
- ``base``:
- For combination strategies (with ``include``), ``base`` is the field to be used as the basis
of the tox environment. ``base: "name"`` in the ``pytest-django`` example above.
- For non-combination strategies, this field is an empty string.
- ``prefix``:
- For combination strategies, ``prefix`` is used to [dis]qualify entries in ``base``.
``prefix: "py"`` in the ``pytest-django`` example above.
- For non-combination strategies, this field is an empty string.
- ``sub``:
- For both combination and non-combination strategies, this JSON object gives a RegEx matching
(``pattern``) and a substitution (``replace``) string. Since these are JSON strings, they cannot be
represented as a Python raw string (``r""``); ensure to properly escape characters.
**Testing additions locally:**
1. Have a local copy of a plugin's GitHub Actions workflow YAML file, as well as the ``tox.ini``. These should
be placed in a sub-folder in ``pytest``; recommended folder name is the repo name.
.. code:: shell
. pytest/
.. pytest-order/
... main.yml
... tox.ini
2. Utilize the ``--dry-run`` flag to run the parsing and command step building, without actually executing
the commands. This will provide debugging information.
.. code:: shell
(.venv) ~/pytest:$> python -m testing.downstream_testing.downstream_runner pytest-order pytest-order/main.yml test --matrix-exclude 3.6 --dry-run
DEBUG | downstream_runner.load_matrix_schema | Loading schema: /home/pytest/testing/downstream_testing/action_schemas.json
DEBUG | downstream_runner.load_matrix_schema | 'pytest-order' schema loaded: {'matrix': ['matrix', 'python-version'],
'tox_cmd_build': {'base': '', 'prefix': '', 'sub': {'pattern': '(\\d|py\\d)\\.*(\\d+)', 'replace': 'py\\1\\2'}}, 'python_version': 'python-version'}
DEBUG | downstream_runner.inject_pytest_dep | toxenv dependencies updated: {'!pytest{60,61,62,624,70}: pytest-xdist', '!pytest50: pytest @ file:///home/pytest'}
DEBUG | downstream_runner.build_run | job_name: test
DEBUG | downstream_runner.parse_matrix | parsed_matrix: [3.6, 3.7, 3.8, 3.9, '3.10', 'pypy3']
DEBUG | downstream_runner.matrix | matrix: {'test': [{'name': 'py37', 'tox_cmd': 'py37'}, {'name': 'py38', 'tox_cmd': 'py38'}, {'name': 'py39', 'tox_cmd': 'py39'},
{'name': 'py310', 'tox_cmd': 'py310'}, {'name': 'pypy3', 'tox_cmd': 'pypy3'}]}
DEBUG | downstream_runner.build_run | matrix[job]: {'name': 'py37', 'tox_cmd': 'py37'}
DEBUG | downstream_runner.build_run | matrix[job]: {'name': 'py38', 'tox_cmd': 'py38'}
DEBUG | downstream_runner.build_run | matrix[job]: {'name': 'py39', 'tox_cmd': 'py39'}
DEBUG | downstream_runner.build_run | matrix[job]: {'name': 'py310', 'tox_cmd': 'py310'}
DEBUG | downstream_runner.build_run | matrix[job]: {'name': 'pypy3', 'tox_cmd': 'pypy3'}
DEBUG | downstream_runner.build_run | built run: {'py37': ['tox -e py37'], 'py38': ['tox -e py38'], 'py39': ['tox -e py39'], 'py310': ['tox -e py310'], 'pypy3': ['tox -e pypy3']}
INFO | downstream_runner.run | --> running: 'tox -e py37'
INFO | downstream_runner.run | --> running: 'tox -e py38'
INFO | downstream_runner.run | --> running: 'tox -e py39'
INFO | downstream_runner.run | --> running: 'tox -e py310'
INFO | downstream_runner.run | --> running: 'tox -e pypy3'
**pytest/.github/workflows/downstream_testing.yml:**
*****************************************************
This GitHub Actions workflow orchestrates the various plugin tests, and only requires some minimal information.
Add a new entry to the combination strategy matrix:
1. ``name``: the plugin's repository name.
2. ``repo``: the ``org/name`` of the plugin's GitHub repository.
3. ``docker_profile``: the docker-compose profile to use for this entry. Docker profiles are defined in
``pytest/testing/downstream_testing/docker-compose.yml``. At the time of this writing, the available
profiles are: ``nodb``, ``mysql``, & ``postgres``.
4. ``jobs``: the list of job(s) to use from the plugin's GitHub Actions workflow.
5. ``workflow_name``: the name of the GitHub Actions workflow file to use (located in ``plugin/.github/workflows``).
6. ``matrix_exclude``: any entries to exclude from the selected matrix (combination & non-combination).
.. code:: YAML
- name: "pytest-django"
repo: "pytest-dev/pytest-django"
docker_profile: "postgres"
jobs: "test"
workflow_name: "main.yml"
matrix_exclude: |
linting,docs py39-dj40-mysql_innodb-coverage ...
- name: "pytest-django"
repo: "pytest-dev/pytest-django"
docker_profile: "mysql"
jobs: "test"
workflow_name: "main.yml"
matrix_exclude: |
linting,docs py310-dj40-postgres-xdist-coverage ...
.. epigraph::
Example 1: using ``pytest-django``, which has a combination strategy matrix, we see two (of three) different
``downstream_testing.yml`` entries. They each require a different database, so each entry uses the appropriate
``docker_profile``. Additionally, to avoid known failures, ``matrix_exclude`` has all items that don't match
the database in use.
.. code:: YAML
- name: "pytest-order"
repo: "pytest-dev/pytest-order"
docker_profile: "nodb"
jobs: "test"
workflow_name: "pythontests.yml"
matrix_exclude: |
3.6
.. epigraph::
Example 2: using ``pytest-order``, which has a non-combination strategy matrix and requires no database.

View File

@ -0,0 +1,92 @@
{
"pytest-django": {
"matrix": [
"matrix",
"include"
],
"tox_cmd_build": {
"base": "name",
"prefix": "py",
"sub":
{
"pattern": "-coverage$",
"replace": ""
}
}
},
"pytest-html": {
"matrix": [
"matrix",
"include"
],
"tox_cmd_build": {
"base": "name",
"prefix": "py",
"sub":
{
"pattern": "(py\\d+)-\\w+",
"replace": "\\1"
}
}
},
"pytest-order": {
"matrix": [
"matrix",
"python-version"
],
"tox_cmd_build": {
"base": "",
"prefix": "",
"sub":
{
"pattern": "(\\d|py\\d)\\.*(\\d+)",
"replace": "py\\1\\2"
}
}
},
"pytest-mock": {
"matrix": [
"matrix",
"python"
],
"tox_cmd_build": {
"base": "",
"prefix": "",
"sub":
{
"pattern": "(\\d|py\\d)\\.*(\\d+)",
"replace": "py\\1\\2"
}
}
},
"pytest-cov": {
"matrix": [
"matrix",
"python-version"
],
"tox_cmd_build": {
"base": "",
"prefix": "",
"sub":
{
"pattern": "^\\d\\.(\\d+)(?:-dev)*",
"replace": "py3\\1"
}
}
},
"pytest-bdd": {
"matrix": [
"matrix",
"python-version"
],
"tox_cmd_build": {
"base": "",
"prefix": "",
"sub":
{
"pattern": "^\\d\\.(\\d+)",
"replace": "py3\\1"
}
}
}
}

View File

@ -0,0 +1,88 @@
version: "3"
services:
base_nodb:
container_name: base
build: .
environment:
- DS_NAME
- DS_YAML
- DS_JOBS
- DS_MATRIX_EXCLUDE
- COV_CMD
- PY_COLORS=1
- TOX_TESTENV_PASSENV=PY_COLORS
volumes:
- type: bind
source: /home/runner/work/pytest/pytest
target: /pytest
volume:
nocopy: true
profiles:
- nodb
base_postgres:
container_name: base
build: .
environment:
- DS_NAME
- DS_YAML
- DS_JOBS
- DS_MATRIX_EXCLUDE
- TEST_DB_USER=$USER
- TEST_DB_PASSWORD=pytest_djang0
- TEST_DB_HOST=postgres_db
- PY_COLORS=1
- TOX_TESTENV_PASSENV=PY_COLORS
volumes:
- type: bind
source: /home/runner/work/pytest/pytest
target: /pytest
volume:
nocopy: true
depends_on:
- postgres_db
profiles:
- postgres
postgres_db:
image: "postgres:latest"
environment:
- POSTGRES_PASSWORD=pytest_djang0
- POSTGRES_USER=$USER
volumes:
- /etc/passwd:/etc/passwd:ro
- ./data/db:/var/lib/postgresql/data
profiles:
- postgres
base_mysql:
container_name: base
build: .
environment:
- DS_NAME
- DS_YAML
- DS_JOBS
- DS_MATRIX_EXCLUDE
- TEST_DB_USER=root
- TEST_DB_PASSWORD=root
- TEST_DB_HOST=mysql_db
- PY_COLORS=1
- TOX_TESTENV_PASSENV=PY_COLORS
volumes:
- type: bind
source: /home/runner/work/pytest/pytest
target: /pytest
volume:
nocopy: true
depends_on:
- mysql_db
profiles:
- mysql
mysql_db:
image: "mysql:latest"
command: --default-authentication-plugin=mysql_native_password
environment:
- MYSQL_ROOT_PASSWORD=root
volumes:
- ./data/db:/var/lib/mysql
profiles:
- mysql

View File

@ -0,0 +1,328 @@
from __future__ import annotations
import argparse
import configparser
import json
import logging
import os.path
import re
import shlex
import subprocess
from collections import UserDict
from typing import Any
from typing import Iterable
from typing import Match
from typing import TYPE_CHECKING
import yaml
if TYPE_CHECKING:
from typing_extensions import TypedDict
# Log format mirrors the "LEVEL | module.function | message" style shown in
# the downstream-testing docs; default level is INFO (raised to DEBUG in the
# __main__ block when --dry-run is given).
logging.basicConfig(
    format="%(levelname)s | %(module)s.%(funcName)s | %(message)s", level="INFO"
)
logger = logging.getLogger(__name__)

# CLI: invoked as
#   python -m testing.downstream_testing.downstream_runner REPO SOURCE JOBS...
parser = argparse.ArgumentParser(description="pytest downstream plugins test runner")
parser.add_argument("repo", help="Name of the repo.")
parser.add_argument("source", help="Path to source YAML file.")
parser.add_argument("jobs", nargs="+", help="Job names to use.")
parser.add_argument(
    "--matrix-exclude", nargs="*", default=[], help="Exclude these matrix names."
)
parser.add_argument(
    "--dry-run",
    action="store_true",
    help="Do not run parsed downstream action. Only display the generated command list and debug information.",
)
if TYPE_CHECKING:
    # Typing-only definitions; the subscripted ``UserDict`` alias and the
    # TypedDict schema shapes exist only for static analysis.
    _BaseUserDict = UserDict[Any, Any]

    class SchemaBase(TypedDict):
        # Repository name the schema entry belongs to.
        repo: str

    class SchemaToxBase(TypedDict):
        # Matrix-entry field used as the basis of the tox env name.
        base: str
        # Prefix used to [dis]qualify ``base`` values (e.g. "py").
        prefix: str
        # Regex "pattern"/"replace" pair applied to build the tox command.
        sub: dict[str, str]

    class SchemaType(SchemaBase, total=False):
        # Path of keys leading to the strategy matrix in the workflow YAML.
        matrix: list[str]
        tox_cmd_build: SchemaToxBase
        python_version: str

else:
    _BaseUserDict = UserDict
def load_matrix_schema(repo: str) -> SchemaType:
    """Load the downstream-testing schema entry for ``repo``.

    Reads ``testing/downstream_testing/action_schemas.json`` relative to the
    current working directory and returns the object keyed by ``repo``.

    :raises FileNotFoundError: if the schema file does not exist.
    :raises RuntimeError: if the file is not valid JSON, or ``repo`` has no
        entry in it.
    """
    schema_path = os.path.join(
        os.getcwd(), "testing", "downstream_testing", "action_schemas.json"
    )
    logger.debug("Loading schema: %s", schema_path)
    if not os.path.exists(schema_path):
        raise FileNotFoundError(f"'{schema_path}' not found.")
    with open(schema_path) as schema_file:
        try:
            schema = json.load(schema_file)
        except json.JSONDecodeError as exc:
            raise RuntimeError(f"Error decoding '{schema_path}'") from exc
    if repo not in schema:
        # Previously this message named the wrong file ("actions_schema.json");
        # report the actual path that was read.
        raise RuntimeError(f"'{repo}' schema definition not found in '{schema_path}'")
    logger.debug("'%s' schema loaded: %s", repo, schema[repo])
    return schema[repo]
# Known tox dependencies that must be rewritten so downstream runs use
# in-development versions instead of released ones.  For each entry:
#   src       -- replacement dependency specification,
#   condition -- regex identifying a matching ``deps`` line,
#   has_gen   -- regex capturing the tox generative-env qualifier (if any).
TOX_DEP_FILTERS = {
    "pytest-rerunfailures": {
        "src": "pytest-rerunfailures @ git+https://github.com/pytest-dev/pytest-rerunfailures.git",
        "condition": r"^pytest-rerunfailures.*",
        "has_gen": r"pytest-rerunfailures\w*:",
    },
    "pytest-xdist": {
        "src": "pytest-xdist",
        "condition": r"^pytest.*pytest-xdist",
        "has_gen": r"pytest\{.*\,7\d.*\}",
    },
    "pytest": {
        # Points at the pytest checkout in the current working directory.
        "src": f"pytest @ file://{os.getcwd()}",
        "condition": r"^pytest(?!\-)",
        "has_gen": r"pytest\w*",
    },
}
class ToxDepFilter(_BaseUserDict):
    """Dict-like view over ``TOX_DEP_FILTERS`` that matches tox ``deps``
    lines against the known dependency filters."""

    def __init__(self) -> None:
        # Share the module-level filter table as this mapping's data.
        self.data = TOX_DEP_FILTERS

    def matches_condition(self, match: str) -> str | None:
        """Checks if ``match`` matches any conditions"""
        # First filter whose ``condition`` regex is found anywhere in the line.
        return next(
            (
                name
                for name, spec in self.data.items()
                if re.search(spec["condition"], match)
            ),
            None,
        )

    def matches_gen_exp(self, dep: str, match: str) -> Match[str] | None:
        """Checks if ``match`` matches ``dep``['has_gen'] condition."""
        gen_pattern = self.data[dep]["has_gen"]
        return re.match(gen_pattern, match)

    def filter_dep(self, match: str) -> dict[Any, Any] | None:
        """Filters ``match`` based on conditions and returns the ``src`` dependency."""
        filter_key = self.matches_condition(match)
        if filter_key is None:
            return None
        gen_match = self.matches_gen_exp(filter_key, match)
        if not gen_match:
            return None
        return {"src": self.data[filter_key]["src"], "gen_exp": gen_match[0]}
class DownstreamRunner:
    """Parse a downstream plugin's GitHub Actions workflow and run its tox
    environments against the local pytest checkout.

    :param repo: name of the plugin repository; also the directory that holds
        its checkout and ``tox.ini``.
    :param yaml_source: path to the plugin's GitHub Actions workflow YAML.
    :param jobs: job names in the workflow whose strategy matrix is used.
    :param matrix_exclude: matrix entry names to skip.
    :param dry_run: when true, build and log the tox commands but do not
        execute them.
    """

    def __init__(
        self,
        repo: str,
        yaml_source: str,
        jobs: Iterable[str],
        matrix_exclude: Iterable[str] = (),
        dry_run: bool = False,
    ) -> None:
        self.repo = repo
        self.yaml_source = yaml_source
        self.matrix_exclude = matrix_exclude
        self.job_names = jobs
        self.dry_run = dry_run
        self._yaml_tree: dict[str, Any] | None = None  # cache for ``yaml_tree``
        self._matrix: dict[str, Any] | None = None  # cache for ``matrix``
        self.matrix_schema: SchemaType = load_matrix_schema(self.repo)

    def __repr__(self) -> str:
        return str(
            "DownstreamRunner("
            f"repo={self.repo}, "
            f"yaml_source={self.yaml_source}, "
            f"job_names={self.job_names}, "
            f"matrix={self._matrix}, "
            ")"
        )

    @property
    def yaml_tree(self) -> dict[str, Any]:
        """The YAML tree built from the ``self.yaml_source`` file.

        Parsed once and cached.  Raises ``RuntimeError`` for unparsable or
        empty YAML documents.
        """
        if self._yaml_tree is None:
            with open(self.yaml_source) as f:
                try:
                    _yaml_tree = yaml.safe_load(f.read())
                except yaml.YAMLError as exc:
                    raise RuntimeError(
                        f"Error while parsing '{self.yaml_source}'."
                    ) from exc
            if _yaml_tree is None:
                # ``safe_load`` returns None for an empty document.
                raise RuntimeError(f"'{self.yaml_source}' failed to parse.")
            self._yaml_tree = _yaml_tree
        return self._yaml_tree

    def inject_pytest_dep(self) -> None:
        """Ensure pytest is a dependency in tox.ini to allow us to use the 'local'
        version of pytest. Also ensure other dependencies in ``TOX_DEP_FILTERS``
        are defined appropriately.
        """
        ini_path = self.repo + "/tox.ini"
        pytest_dep = TOX_DEP_FILTERS["pytest"]["src"]
        tox_source = configparser.ConfigParser()
        # Close the ini file before it is re-opened for writing below
        # (previously the handle from a bare ``open()`` was leaked).
        with open(ini_path) as ini_file:
            tox_source.read_file(ini_file)
        found_dep = []
        updated_deps = set()
        section_deps = tox_source.get("testenv", "deps", fallback=None)
        if section_deps:
            tox_dep_filter = ToxDepFilter()
            for dep in section_deps.split("\n"):
                filtered_dep = tox_dep_filter.filter_dep(dep)
                if filtered_dep and filtered_dep["src"] not in found_dep:
                    found_dep.append(filtered_dep["src"])
                    # "!<gen_exp>:" scopes the replacement to every env
                    # except the ones the original line applied to.
                    updated_deps.add(
                        f"!{filtered_dep['gen_exp']}: {filtered_dep['src']}"
                    )
        if not [item for item in updated_deps if pytest_dep in item]:
            updated_deps.add(pytest_dep)
        final_deps = "\n".join(updated_deps)
        logger.debug("toxenv dependencies updated: %s", updated_deps)
        if not tox_source.has_section("testenv"):
            # Guard against a tox.ini without [testenv] so the assignment
            # below cannot raise KeyError.
            tox_source.add_section("testenv")
        tox_source["testenv"]["deps"] = f"{section_deps or ''}\n{final_deps}"
        with open(ini_path, "w") as f:
            tox_source.write(f)

    @property
    def matrix(self) -> dict[str, Iterable[dict[str, str]]]:
        """Per-job matrix entries, each carrying a ``tox_cmd``.

        Iterates over ``self.yaml_tree``'s strategy matrix for each job in
        ``self.job_names`` and passes each through ``parse_matrix``; the
        result is cached after the first access.
        """

        def parse_matrix(yaml_tree: dict[str, Any]) -> Iterable[Any]:
            """Parses ``yaml_tree`` strategy matrix using ``self.matrix_schema``
            information."""
            pre_parsed: dict[str, Any] | Iterable[str | float] = yaml_tree
            # Walk the schema's key path down to the actual matrix list.
            for key in self.matrix_schema["matrix"]:
                if isinstance(pre_parsed, dict):
                    pre_parsed = pre_parsed[key]
            if isinstance(pre_parsed, list):
                parsed_matrix = pre_parsed
            else:
                msg_info = f"repo: {self.repo} | matrix schema: {self.matrix_schema} | parsed result: {pre_parsed}"
                raise TypeError(
                    f"Parsed Actions matrix is invalid. Should be list/array. {msg_info}"
                )
            logger.debug("parsed_matrix: %s", parsed_matrix)
            if parsed_matrix:
                tox_base = self.matrix_schema["tox_cmd_build"]["base"]
                tox_prefix = self.matrix_schema["tox_cmd_build"]["prefix"]
                sub_pattern = self.matrix_schema["tox_cmd_build"]["sub"]["pattern"]
                sub_replace = self.matrix_schema["tox_cmd_build"]["sub"]["replace"]
                skip_matrices = []
                if "include" in self.matrix_schema["matrix"]:
                    # Combination matrix: entries are dicts; filter on the
                    # ``base`` field and rewrite it into a tox env name.
                    for item in parsed_matrix:
                        if (
                            not item[tox_base].startswith(tox_prefix)
                            or item[tox_base] in self.matrix_exclude
                            # Non-Ubuntu runners are not supported by the
                            # downstream docker image.
                            or not item.get("os", "ubuntu").startswith("ubuntu")
                        ):
                            skip_matrices.append(item)
                            continue
                        item["tox_cmd"] = re.sub(
                            sub_pattern, sub_replace, item[tox_base]
                        )
                        logger.debug("re.sub: %s", item[tox_base])
                    for matrice in skip_matrices:
                        parsed_matrix.remove(matrice)
                else:
                    # Plain matrix: entries are scalars (Python versions);
                    # normalize each into a {"name", "tox_cmd"} dict.
                    new_parsed_matrix = []
                    for item in parsed_matrix:
                        if str(item) in self.matrix_exclude:
                            continue
                        tox_cmd = re.sub(sub_pattern, sub_replace, str(item))
                        new_parsed_matrix.append(
                            {"name": tox_cmd, "tox_cmd": tox_cmd}
                        )
                    parsed_matrix = new_parsed_matrix
            return parsed_matrix

        if self._matrix is None:
            matrix_items = {}
            for job in self.job_names:
                job_yaml = self.yaml_tree["jobs"][job]["strategy"]
                matrix_items[job] = parse_matrix(job_yaml)
            self._matrix = matrix_items
        logger.debug("matrix: %s", self._matrix)
        return self._matrix

    def build_run(self) -> dict[str, list[str]]:
        """Builds the list of commands for all necessary jobs via ``self.matrix``."""
        run = {}
        for job in self.job_names:
            logger.debug("job_name: %s", job)
            for matrix in self.matrix[job]:
                logger.debug("matrix[job]: %s", matrix)
                run[matrix["name"]] = [f"tox -e {matrix['tox_cmd']}"]
        logger.debug("built run: %s", run)
        return run

    def run(self) -> None:
        """Rewrite the plugin's tox deps, then execute (or, for a dry run,
        only log) each generated tox command from inside the plugin checkout.

        NOTE(review): ``inject_pytest_dep`` still modifies the plugin's
        ``tox.ini`` even when ``dry_run`` is set -- confirm this is intended.
        """
        self.inject_pytest_dep()
        run_steps = self.build_run()
        os.chdir(self.repo)
        for matrix, steps in run_steps.items():
            for step in steps:
                cmd = shlex.split(step)
                logger.info("--> running: '%s'", step)
                if not self.dry_run:
                    # check=True aborts the whole run on the first failing
                    # tox environment.
                    subprocess.run(cmd, encoding="utf-8", check=True)
if __name__ == "__main__":
    cli_args = parser.parse_args()
    # --dry-run raises verbosity so the generated commands and parsing
    # decisions are visible without executing anything.
    if cli_args.dry_run:
        logger.setLevel("DEBUG")
    runner = DownstreamRunner(
        cli_args.repo,
        cli_args.source,
        cli_args.jobs,
        matrix_exclude=cli_args.matrix_exclude,
        dry_run=cli_args.dry_run,
    )
    runner.run()

View File

@ -0,0 +1,6 @@
#!/bin/bash
# Container entrypoint: install the runner's requirements, then execute the
# downstream test driver against the bind-mounted pytest checkout at /pytest.
python3.9 -m pip install --no-cache-dir pyyaml tox sh
cd /pytest
# Variables are intentionally unquoted: DS_JOBS and DS_MATRIX_EXCLUDE may
# carry multiple whitespace-separated values (argparse nargs="+" / nargs="*").
# NOTE(review): consider `set -euo pipefail` so pip/cd failures abort the run.
python3.9 -u -m testing.downstream_testing.downstream_runner $DS_NAME $DS_YAML $DS_JOBS --matrix-exclude $DS_MATRIX_EXCLUDE

View File

@ -0,0 +1,280 @@
from __future__ import annotations
import sys
from typing import Any
import pytest
sys.path.append("testing")
from downstream_testing import downstream_runner # noqa: E402
xfail = pytest.mark.xfail
DUMMY_YAML_COMBINATION = """---
name: dummy-include
jobs:
test:
runs-on: ubuntu-20.04
strategy:
fail-fast: false
matrix:
include:
- name: i-do-not-start-with-py
python: 3.6
allow_failure: false
- name: py310-dj40-postgres-xdist-coverage
python: '3.10'
allow_failure: false
- name: py36-exclude-me
python: 3.6
allow_failure: false
- name: py37-exclude-me
python: 3.7
allow_failure: false
"""
DUMMY_YAML_NO_COMBINATION = """---
name: dummy-no-include
jobs:
test:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, windows-latest]
python-version: [3.6, 3.7, 3.8, 3.9, "3.10", pypy3]
exclude:
- os: windows-latest
python-version: pypy3
"""
@pytest.fixture()
def mock_schema_combination(monkeypatch):
    """Patch ``load_matrix_schema`` to return a canned combination-style
    (``include``-based) schema, avoiding any filesystem access."""

    def _fake_combination_schema(repo: str) -> dict[str, Any]:
        return {
            "matrix": ["matrix", "include"],
            "tox_cmd_build": {
                "base": "name",
                "prefix": "py",
                "sub": {"pattern": "-coverage$", "replace": ""},
            },
            "python_version": "python",
        }

    monkeypatch.setattr(downstream_runner, "load_matrix_schema", _fake_combination_schema)
@pytest.fixture()
def mock_schema_no_combination(monkeypatch):
    """Patch ``load_matrix_schema`` to return a canned non-combination schema
    (scalar ``python-version`` entries), avoiding any filesystem access."""

    def _fake_plain_schema(repo: str) -> dict[str, Any]:
        return {
            "matrix": ["matrix", "python-version"],
            "tox_cmd_build": {
                "base": "",
                "prefix": "",
                "sub": {"pattern": "(\\d|py\\d)\\.*(\\d+)", "replace": "py\\1\\2"},
            },
            "python_version": "python",
        }

    monkeypatch.setattr(downstream_runner, "load_matrix_schema", _fake_plain_schema)
@pytest.mark.downstream
class TestDownstreamRunnerConstructor:
    """Constructor signature checks for ``DownstreamRunner``."""

    @pytest.mark.parametrize(
        "args",
        [
            pytest.param(tuple(), marks=xfail),
            pytest.param(("repo",), marks=xfail),
            pytest.param(("repo", "yaml_source"), marks=xfail),
            ("pytest-downstream", "yaml_source", ["jobs"]),
        ],
        ids=["no args", "repo only", "missing jobs", "all args"],
    )
    def test_args(self, args, mock_schema_combination) -> None:
        # repo, yaml_source and jobs are all required positional arguments;
        # only the fully-populated case constructs successfully.
        downstream_runner.DownstreamRunner(*args)

    @pytest.mark.parametrize(
        "kwargs",
        [
            ("matrix_exclude", "exclude"),
            ("dry_run", True),
        ],
        ids=["matrix_exclude", "dry_run"],
    )
    def test_kwargs(self, kwargs, mock_schema_combination) -> None:
        # Each optional keyword is stored verbatim as an instance attribute.
        args = ("pytest-downstream", "yaml_source", ["test"])
        new_kwargs = {kwargs[0]: kwargs[1]}
        runner = downstream_runner.DownstreamRunner(*args, **new_kwargs)
        assert kwargs[1] == getattr(runner, kwargs[0])
@pytest.mark.downstream
class TestDownstreamRunnerProperties:
    """Behavior of the lazy ``yaml_tree`` and ``matrix`` properties."""

    def test_yaml_tree_file_doesnt_exist(self, mock_schema_combination) -> None:
        # The source file is opened lazily; a missing file surfaces as
        # FileNotFoundError on first property access.
        runner = downstream_runner.DownstreamRunner(
            "pytest-downstream", "yaml_source", ["test"], dry_run=True
        )
        with pytest.raises(FileNotFoundError):
            runner.yaml_tree

    def test_yaml_tree_bad_yaml(self, mock_schema_combination, tmp_path) -> None:
        # Unparsable YAML is wrapped in RuntimeError naming the source file.
        yaml_source = tmp_path / "test.yml"
        yaml_source.write_text("---\n:")
        runner = downstream_runner.DownstreamRunner(
            "pytest-downstream", yaml_source, ["test"], dry_run=True
        )
        with pytest.raises(RuntimeError) as excinfo:
            runner.yaml_tree
        assert str(yaml_source) in excinfo.exconly(tryshort=True)

    def test_yaml_tree_empty_yaml(self, mock_schema_combination, tmp_path) -> None:
        # An empty document (safe_load -> None) also raises RuntimeError.
        yaml_source = tmp_path / "test.yml"
        yaml_source.write_text("---")
        runner = downstream_runner.DownstreamRunner(
            "pytest-downstream", yaml_source, ["test"], dry_run=True
        )
        with pytest.raises(RuntimeError) as excinfo:
            runner.yaml_tree
        assert str(yaml_source) in excinfo.exconly(tryshort=True)

    def test_yaml_tree_passing_yaml(self, mock_schema_combination, tmp_path) -> None:
        # Valid YAML is exposed as a plain dict tree.
        yaml_source = tmp_path / "test.yml"
        yaml_source.write_text(DUMMY_YAML_COMBINATION)
        runner = downstream_runner.DownstreamRunner(
            "pytest-downstream", yaml_source, ["test"], dry_run=True
        )
        assert runner.yaml_tree["name"] == "dummy-include"
        assert "test" in runner.yaml_tree["jobs"]

    def test_matrix_combination(self, mock_schema_combination, tmp_path) -> None:
        # Combination entries surviving the filter all get a "py"-prefixed
        # tox_cmd (the non-"py" entry is dropped by the prefix check).
        yaml_source = tmp_path / "test.yml"
        yaml_source.write_text(DUMMY_YAML_COMBINATION)
        runner = downstream_runner.DownstreamRunner(
            "pytest-downstream", yaml_source, ["test"], dry_run=True
        )
        assert all(
            [
                matrice.get("tox_cmd", "").startswith("py")
                for matrice in runner.matrix["test"]
            ]
        )

    def test_matrix_no_combination(self, mock_schema_no_combination, tmp_path) -> None:
        # Scalar versions are rewritten into "pyXY"-style tox commands.
        yaml_source = tmp_path / "test.yml"
        yaml_source.write_text(DUMMY_YAML_NO_COMBINATION)
        runner = downstream_runner.DownstreamRunner(
            "pytest-downstream", yaml_source, ["test"], dry_run=True
        )
        assert all(
            [
                matrice.get("tox_cmd", "").startswith("py")
                for matrice in runner.matrix["test"]
            ]
        )

    def test_matrix_combination_matrix_exclude(
        self, mock_schema_combination, tmp_path
    ) -> None:
        # Excluded combination entries (matched by "name") never appear.
        yaml_source = tmp_path / "test.yml"
        yaml_source.write_text(DUMMY_YAML_COMBINATION)
        matrix_to_exclude = ["py36-exclude-me", "py37-exclude-me"]
        runner = downstream_runner.DownstreamRunner(
            "pytest-downstream",
            yaml_source,
            ["test"],
            matrix_exclude=matrix_to_exclude,
            dry_run=True,
        )
        matrix_names = {matrice["name"] for matrice in runner.matrix["test"]}
        assert matrix_names.isdisjoint(set(matrix_to_exclude))

    def test_matrix_no_combination_matrix_exclude(
        self, mock_schema_no_combination, tmp_path
    ) -> None:
        # Excluded scalar entries (matched by their string form, pre-rewrite)
        # never appear under their rewritten names.
        yaml_source = tmp_path / "test.yml"
        yaml_source.write_text(DUMMY_YAML_NO_COMBINATION)
        matrix_to_exclude = ["3.6", "3.7"]
        runner = downstream_runner.DownstreamRunner(
            "pytest-downstream",
            yaml_source,
            ["test"],
            matrix_exclude=matrix_to_exclude,
            dry_run=True,
        )
        matrix_names = {matrice["name"] for matrice in runner.matrix["test"]}
        assert matrix_names.isdisjoint({"py36", "py37"})
@pytest.mark.downstream
class TestDownstreamRunnerBuild:
    """End-to-end checks of ``build_run`` command generation."""

    def test_build_run_combination_matrix(
        self, mock_schema_combination, tmp_path
    ) -> None:
        # Only the single surviving combination entry yields a command; the
        # "-coverage" suffix is stripped by the schema's sub pattern.
        yaml_source = tmp_path / "test.yml"
        yaml_source.write_text(DUMMY_YAML_COMBINATION)
        matrix_to_exclude = ["py36-exclude-me", "py37-exclude-me"]
        runner = downstream_runner.DownstreamRunner(
            "pytest-downstream",
            yaml_source,
            ["test"],
            matrix_exclude=matrix_to_exclude,
            dry_run=True,
        )
        run = runner.build_run()
        assert run == {
            "py310-dj40-postgres-xdist-coverage": [
                "tox -e py310-dj40-postgres-xdist",
            ]
        }

    def test_build_run_no_combination_matrix(
        self, mock_schema_no_combination, tmp_path
    ) -> None:
        # Remaining scalar versions map to "tox -e pyXY" commands keyed by
        # their rewritten names.
        yaml_source = tmp_path / "test.yml"
        yaml_source.write_text(DUMMY_YAML_NO_COMBINATION)
        matrix_to_exclude = ["3.6", "3.7"]
        runner = downstream_runner.DownstreamRunner(
            "pytest-downstream",
            yaml_source,
            ["test"],
            matrix_exclude=matrix_to_exclude,
            dry_run=True,
        )
        run = runner.build_run()
        assert run == {
            "py310": ["tox -e py310"],
            "py38": ["tox -e py38"],
            "py39": ["tox -e py39"],
            "pypy3": ["tox -e pypy3"],
        }

View File

@ -16,6 +16,7 @@ envlist =
py37-freeze py37-freeze
docs docs
docs-checklinks docs-checklinks
downstream
@ -27,7 +28,7 @@ commands =
coverage: coverage report -m coverage: coverage report -m
passenv = USER USERNAME COVERAGE_* PYTEST_ADDOPTS TERM SETUPTOOLS_SCM_PRETEND_VERSION_FOR_PYTEST passenv = USER USERNAME COVERAGE_* PYTEST_ADDOPTS TERM SETUPTOOLS_SCM_PRETEND_VERSION_FOR_PYTEST
setenv = setenv =
_PYTEST_TOX_DEFAULT_POSARGS={env:_PYTEST_TOX_POSARGS_DOCTESTING:} {env:_PYTEST_TOX_POSARGS_LSOF:} {env:_PYTEST_TOX_POSARGS_XDIST:} _PYTEST_TOX_DEFAULT_POSARGS={env:_PYTEST_TOX_POSARGS_DOCTESTING:} {env:_PYTEST_TOX_POSARGS_LSOF:} {env:_PYTEST_TOX_POSARGS_XDIST:} {env:_PYTEST_TOX_POSARGS_DOWNSTREAM:}
# Configuration to run with coverage similar to CI, e.g. # Configuration to run with coverage similar to CI, e.g.
# "tox -e py37-coverage". # "tox -e py37-coverage".
@ -43,6 +44,8 @@ setenv =
lsof: _PYTEST_TOX_POSARGS_LSOF=--lsof lsof: _PYTEST_TOX_POSARGS_LSOF=--lsof
xdist: _PYTEST_TOX_POSARGS_XDIST=-n auto xdist: _PYTEST_TOX_POSARGS_XDIST=-n auto
downstream: _PYTEST_TOX_POSARGS_DOWNSTREAM=-m downstream
extras = testing extras = testing
deps = deps =
doctesting: PyYAML doctesting: PyYAML