diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs
index 1ce1ca378..5e91ea0fa 100644
--- a/.git-blame-ignore-revs
+++ b/.git-blame-ignore-revs
@@ -6,3 +6,6 @@
# upgrade to black 23.1, longer lines (100)
7fe8c0739b0515d00daabc7db87bc5fad926d345
+
+# upgrade to black 24
+80c9ec01b4602fc51ac1f53744f98df8baba3c31
diff --git a/.github/workflows/ci-tests.yml b/.github/workflows/ci-tests.yml
index 6b2132f7a..ae295e534 100644
--- a/.github/workflows/ci-tests.yml
+++ b/.github/workflows/ci-tests.yml
@@ -28,26 +28,19 @@ jobs:
tox:
name: Tox
- runs-on: ubuntu-20.04 # 22.04 doesn't support Python 3.6
+ runs-on: ubuntu-22.04
strategy:
matrix:
py-ver-major: [3]
- py-ver-minor: [6, 7, 8, 9, 10, 11]
+ py-ver-minor: [8, 9, 10, 11, 12]
step: [lint, unit, bandit, mypy]
- exclude:
- - py-ver-major: 3
- py-ver-minor: 6
- step: mypy
- - py-ver-major: 3
- py-ver-minor: 6
- step: lint
env:
py-semver: ${{ format('{0}.{1}', matrix.py-ver-major, matrix.py-ver-minor) }}
TOXENV: ${{ format('py{0}{1}-{2}', matrix.py-ver-major, matrix.py-ver-minor, matrix.step) }}
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
with:
fetch-depth: 0
@@ -62,9 +55,10 @@ jobs:
run: sudo usermod -c 'CI Runner' "$(whoami)"
- name: Set up Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: ${{ env.py-semver }}
+ allow-prereleases: true
cache: pip
cache-dependency-path: |
requirements.txt
@@ -77,20 +71,21 @@ jobs:
- name: MyPy cache
if: ${{ matrix.step == 'mypy' }}
- uses: actions/cache@v3
+ uses: actions/cache@v4
with:
path: .mypy_cache/${{ env.py-semver }}
key: mypy-${{ env.py-semver }}
- name: Test with tox
- run: tox
+ run: APPTAINER_TMPDIR=${RUNNER_TEMP} tox
- name: Upload coverage to Codecov
if: ${{ matrix.step == 'unit' }}
- uses: codecov/codecov-action@v3
+ uses: codecov/codecov-action@v4
with:
fail_ci_if_error: true
- token: ${{ secrets.CODECOV_TOKEN }}
+ env:
+ CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
tox-style:
name: Linters
@@ -101,16 +96,16 @@ jobs:
step: [lintreadme, shellcheck, pydocstyle]
env:
- py-semver: "3.11"
- TOXENV: ${{ format('py311-{0}', matrix.step) }}
+ py-semver: "3.12"
+ TOXENV: ${{ format('py312-{0}', matrix.step) }}
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: ${{ env.py-semver }}
cache: pip
@@ -131,9 +126,9 @@ jobs:
name: No leftovers
runs-on: ubuntu-22.04
env:
- py-semver: "3.11"
+ py-semver: "3.12"
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
with:
fetch-depth: 0
@@ -146,7 +141,7 @@ jobs:
run: sudo usermod -c 'CI Runner' "$(whoami)"
- name: Set up Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
python-version: ${{ env.py-semver }}
cache: pip
@@ -162,7 +157,7 @@ jobs:
chmod a-w .
- name: run tests
- run: make test
+ run: APPTAINER_TMPDIR=${RUNNER_TEMP} make test
conformance_tests:
@@ -180,7 +175,7 @@ jobs:
extras: "--fast-parser"
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Set up Singularity
if: ${{ matrix.container == 'singularity' }}
@@ -190,7 +185,7 @@ jobs:
- name: Singularity cache
if: ${{ matrix.container == 'singularity' }}
- uses: actions/cache@v3
+ uses: actions/cache@v4
with:
path: sifcache
key: singularity
@@ -200,29 +195,36 @@ jobs:
run: sudo rm -f /usr/bin/docker ; sudo apt-get install -y podman
- name: Set up Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
- python-version: 3.11
+ python-version: 3.12
cache: pip
- name: "Test CWL ${{ matrix.cwl-version }} conformance"
env:
VERSION: ${{ matrix.cwl-version }}
CONTAINER: ${{ matrix.container }}
- GIT_TARGET: main
- CWLTOOL_OPTIONS: ${{ matrix.extras }}
+ GIT_TARGET: ${{ matrix.cwl-version == 'v1.2' && '1.2.1_proposed' || 'main' }}
+ CWLTOOL_OPTIONS: ${{ matrix.cwl-version == 'v1.2' && '--relax-path-checks' || '' }} ${{ matrix.extras }}
run: ./conformance-test.sh
+ - name: Archive test results
+ uses: actions/upload-artifact@v4
+ with:
+ name: cwl-${{ matrix.cwl-version }}-${{ matrix.container }}${{ matrix.extras }}-conformance-results
+ path: |
+ **/cwltool_conf*.xml
- name: Upload coverage to Codecov
- uses: codecov/codecov-action@v3
+ uses: codecov/codecov-action@v4
with:
fail_ci_if_error: true
- token: ${{ secrets.CODECOV_TOKEN }}
+ env:
+ CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
release_test:
name: cwltool release test
runs-on: ubuntu-22.04
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
- name: Set up Singularity
run: |
@@ -230,9 +232,9 @@ jobs:
sudo apt-get install -y ./singularity-ce_3.10.4-jammy_amd64.deb
- name: Set up Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
- python-version: 3.11
+ python-version: 3.12
cache: pip
cache-dependency-path: |
requirements.txt
@@ -254,7 +256,11 @@ jobs:
build_test_container:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
+ - name: record cwltool version
+ run: pip install -U setuptools wheel && pip install setuptools_scm[toml] && python setup.py --version
- name: build & test cwltool_module container
run: ./build-cwltool-docker.sh
@@ -262,15 +268,15 @@ jobs:
name: Test on macos-latest
runs-on: macos-latest
env:
- TOXENV: py311-unit
+ TOXENV: py312-unit
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Set up Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
- python-version: 3.11
+ python-version: 3.12
cache: pip
cache-dependency-path: |
requirements.txt
@@ -281,7 +287,7 @@ jobs:
pip install "tox<4" "tox-gh-actions<3"
# # docker for mac install is not currently stable
# - name: 'SETUP MacOS: load Homebrew cache'
- # uses: actions/cache@v3
+ # uses: actions/cache@v4
# if: runner.os == 'macOS'
# with:
# path: |
@@ -293,7 +299,8 @@ jobs:
- name: Test with tox
run: tox
- name: Upload coverage to Codecov
- uses: codecov/codecov-action@v3
+ uses: codecov/codecov-action@v4
with:
fail_ci_if_error: true
- token: ${{ secrets.CODECOV_TOKEN }}
+ env:
+ CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 4baf55059..3b84a185b 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -23,13 +23,13 @@ jobs:
steps:
- name: Checkout repository
- uses: actions/checkout@v3
+ uses: actions/checkout@v4
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
- uses: github/codeql-action/init@v2
+ uses: github/codeql-action/init@v3
with:
languages: python
- name: Perform CodeQL Analysis
- uses: github/codeql-action/analyze@v2
+ uses: github/codeql-action/analyze@v3
diff --git a/.github/workflows/quay-publish.yml b/.github/workflows/quay-publish.yml
index 191fe9d66..8e248fbe3 100644
--- a/.github/workflows/quay-publish.yml
+++ b/.github/workflows/quay-publish.yml
@@ -8,31 +8,39 @@ jobs:
build:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v3
+ - uses: actions/checkout@v4
+ with:
+ fetch-depth: 0
- name: Get image tags
id: image_tags
run: |
- echo -n ::set-output "name=IMAGE_TAGS::${GITHUB_REF#refs/*/}"
+ echo -n "IMAGE_TAGS=${GITHUB_REF#refs/*/}" >> "$GITHUB_OUTPUT"
+ - name: record cwltool version
+ run: |
+ pip install "setuptools>=61"
+ pip install setuptools_scm[toml] wheel
+ python setup.py --version
- name: Set up QEMU
- uses: docker/setup-qemu-action@v2
+ uses: docker/setup-qemu-action@v3
- name: Set up Docker Buildx
- uses: docker/setup-buildx-action@v2
+ uses: docker/setup-buildx-action@v3
- name: Cache Docker layers
- uses: actions/cache@v3
+ uses: actions/cache@v4
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-multi-buildx-${{ github.sha }}
restore-keys: |
${{ runner.os }}-multi-buildx
- name: Login to Quay.io
- uses: docker/login-action@v2
+ uses: docker/login-action@v3
with:
registry: ${{ secrets.REGISTRY_SERVER }}
username: ${{ secrets.REGISTRY_USERNAME }}
password: ${{ secrets.REGISTRY_PASSWORD }}
- name: Build and publish cwltool_module image to Quay
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v5
with:
+ context: .
file: cwltool.Dockerfile
tags: quay.io/commonwl/cwltool_module:${{ steps.image_tags.outputs.IMAGE_TAGS }},quay.io/commonwl/cwltool_module:latest
target: module
@@ -41,8 +49,9 @@ jobs:
cache-from: type=gha
cache-to: type=gha,mode=max
- name: Build and publish cwltool image to Quay
- uses: docker/build-push-action@v4
+ uses: docker/build-push-action@v5
with:
+ context: .
file: cwltool.Dockerfile
tags: quay.io/commonwl/cwltool:${{ steps.image_tags.outputs.IMAGE_TAGS }},quay.io/commonwl/cwltool:latest
platforms: linux/amd64,linux/arm64
diff --git a/.gitignore b/.gitignore
index 7a280f0df..5941627f8 100644
--- a/.gitignore
+++ b/.gitignore
@@ -53,6 +53,8 @@ value
.python-version
+cwltool/_version.py
+
# Folder created when using make
cwltool_deps
docs/_build/
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 8aa65142d..cd9e9105d 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,6 +1,6 @@
Style guide:
- PEP-8 (as implemented by the `black` code formatting tool)
-- Python 3.6+ compatible code
+- Python 3.8+ compatible code
- PEP-484 type hints
The development is done using `git`, we encourage you to get familiar with it.
diff --git a/MANIFEST.in b/MANIFEST.in
index 6b533819d..92f65cfe5 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -2,7 +2,7 @@ include README.rst CODE_OF_CONDUCT.md CONTRIBUTING.md
include MANIFEST.in
include LICENSE.txt
include *requirements.txt mypy.ini tox.ini
-include gittaggers.py Makefile cwltool.py
+include Makefile cwltool.py
recursive-include mypy-stubs *.pyi *.py
include tests/*
include tests/cwl-conformance/cwltool-conftest.py
diff --git a/Makefile b/Makefile
index 3aef78d53..92e51f493 100644
--- a/Makefile
+++ b/Makefile
@@ -25,7 +25,7 @@ MODULE=cwltool
# `SHELL=bash` doesn't work for some, so don't use BASH-isms like
# `[[` conditional expressions.
PYSOURCES=$(wildcard ${MODULE}/**.py cwltool/cwlprov/*.py tests/*.py) setup.py
-DEVPKGS=diff_cover pylint pep257 pydocstyle 'tox<4' tox-pyenv \
+DEVPKGS=diff_cover pylint pep257 pydocstyle 'tox<4' tox-pyenv auto-walrus \
isort wheel autoflake pyupgrade bandit -rlint-requirements.txt\
-rtest-requirements.txt -rmypy-requirements.txt -rdocs/requirements.txt
DEBDEVPKGS=pep8 python-autopep8 pylint python-coverage pydocstyle sloccount \
@@ -72,22 +72,18 @@ dev: install-dep
## dist : create a module package for distribution
dist: dist/${MODULE}-$(VERSION).tar.gz
-check-python3:
-# Check that the default python version is python 3
- python --version 2>&1 | grep "Python 3"
-
-dist/${MODULE}-$(VERSION).tar.gz: check-python3 $(SOURCES)
- python setup.py sdist bdist_wheel
+dist/${MODULE}-$(VERSION).tar.gz: $(SOURCES)
+ python3 -m build
## docs : make the docs
docs: FORCE
cd docs && $(MAKE) html
## clean : clean up all temporary / machine-generated files
-clean: check-python3 FORCE
+clean: FORCE
rm -f ${MODULE}/*.pyc tests/*.pyc *.so ${MODULE}/*.so cwltool/cwlprov/*.so
rm -Rf ${MODULE}/__pycache__/
- python setup.py clean --all || true
+ rm -Rf build
rm -Rf .coverage
rm -f diff-cover.html
@@ -122,10 +118,10 @@ codespell-fix:
## format : check/fix all code indentation and formatting (runs black)
format:
- black --exclude cwltool/schemas setup.py cwltool.py cwltool tests mypy-stubs
+ black --exclude cwltool/schemas --exclude cwltool/_version.py setup.py cwltool.py cwltool tests mypy-stubs
format-check:
- black --diff --check --exclude cwltool/schemas setup.py cwltool.py cwltool tests mypy-stubs
+ black --diff --check --exclude cwltool/schemas setup.py --exclude cwltool/_version.py cwltool.py cwltool tests mypy-stubs
## pylint : run static code analysis on Python code
pylint: $(PYSOURCES)
@@ -163,12 +159,12 @@ diff-cover.html: coverage.xml
diff-cover --compare-branch=main $^ --html-report $@
## test : run the cwltool test suite
-test: check-python3 $(PYSOURCES)
- python -m pytest -rs ${PYTEST_EXTRA}
+test: $(PYSOURCES)
+ python3 -m pytest -rs ${PYTEST_EXTRA}
## testcov : run the cwltool test suite and collect coverage
-testcov: check-python3 $(PYSOURCES)
- python -m pytest -rs --cov --cov-config=.coveragerc --cov-report= ${PYTEST_EXTRA}
+testcov: $(PYSOURCES)
+ python3 -m pytest -rs --cov --cov-config=.coveragerc --cov-report= ${PYTEST_EXTRA}
sloccount.sc: $(PYSOURCES) Makefile
sloccount --duplicates --wide --details $^ > $@
@@ -183,24 +179,8 @@ list-author-emails:
mypy3: mypy
mypy: $(PYSOURCES)
- if ! test -f $(shell python -c 'import ruamel.yaml; import os.path; print(os.path.dirname(ruamel.yaml.__file__))')/py.typed ; \
- then \
- rm -Rf mypy-stubs/ruamel/yaml ; \
- ln -s $(shell python -c 'import ruamel.yaml; import os.path; print(os.path.dirname(ruamel.yaml.__file__))') \
- mypy-stubs/ruamel/ ; \
- fi # if minimally required ruamel.yaml version is 0.15.99 or greater, than the above can be removed
MYPYPATH=$$MYPYPATH:mypy-stubs mypy $^
-mypy_3.6: $(filter-out setup.py gittagger.py,$(PYSOURCES))
- if ! test -f $(shell python -c 'import ruamel.yaml; import os.path; print(os.path.dirname(ruamel.yaml.__file__))')/py.typed ; \
- then \
- rm -Rf mypy-stubs/ruamel/yaml ; \
- ln -s $(shell python -c 'import ruamel.yaml; import os.path; print(os.path.dirname(ruamel.yaml.__file__))') \
- mypy-stubs/ruamel/ ; \
- fi # if minimally required ruamel.yaml version is 0.15.99 or greater, than the above can be removed
- MYPYPATH=$$MYPYPATH:mypy-stubs mypy --python-version 3.6 $^
-
-
mypyc: $(PYSOURCES)
MYPYPATH=mypy-stubs CWLTOOL_USE_MYPYC=1 pip install --verbose -e . \
&& pytest -rs -vv ${PYTEST_EXTRA}
@@ -210,15 +190,19 @@ shellcheck: FORCE
cwltool-in-docker.sh
pyupgrade: $(PYSOURCES)
- pyupgrade --exit-zero-even-if-changed --py36-plus $^
+ pyupgrade --exit-zero-even-if-changed --py38-plus $^
+ auto-walrus $^
-release-test: check-python3 FORCE
+release-test: FORCE
git diff-index --quiet HEAD -- || ( echo You have uncommitted changes, please commit them and try again; false )
./release-test.sh
-release: release-test
+release:
+ export SETUPTOOLS_SCM_PRETEND_VERSION_FOR_CWLTOOL=${VERSION} && \
+ ./release-test.sh && \
. testenv2/bin/activate && \
- python testenv2/src/${MODULE}/setup.py sdist bdist_wheel && \
+ pip install build && \
+ python3 -m build testenv2/src/${MODULE} && \
pip install twine && \
twine upload testenv2/src/${MODULE}/dist/* && \
git tag ${VERSION} && git push --tags
diff --git a/README.rst b/README.rst
index b65d74bca..2888f08bb 100644
--- a/README.rst
+++ b/README.rst
@@ -52,7 +52,7 @@ and provide comprehensive validation of CWL
files as well as provide other tools related to working with CWL.
``cwltool`` is written and tested for
-`Python `_ ``3.x {x = 6, 7, 8, 9, 10, 11}``
+`Python `_ ``3.x {x = 8, 9, 10, 11, 12}``
The reference implementation consists of two packages. The ``cwltool`` package
is the primary Python module containing the reference implementation in the
diff --git a/build-cwltool-docker.sh b/build-cwltool-docker.sh
index 97910069a..a70fdf4df 100755
--- a/build-cwltool-docker.sh
+++ b/build-cwltool-docker.sh
@@ -1,9 +1,10 @@
#!/bin/bash
set -ex
-docker build --file=cwltool.Dockerfile --tag=quay.io/commonwl/cwltool_module --target module .
-docker build --file=cwltool.Dockerfile --tag=quay.io/commonwl/cwltool .
+engine=${ENGINE:-docker} # example: `ENGINE=podman ./build-cwltool-docker.sh`
+${engine} build --file=cwltool.Dockerfile --tag=quay.io/commonwl/cwltool_module --target module .
+${engine} build --file=cwltool.Dockerfile --tag=quay.io/commonwl/cwltool .
-docker run -t -v /var/run/docker.sock:/var/run/docker.sock \
+${engine} run -t -v /var/run/docker.sock:/var/run/docker.sock \
-v /tmp:/tmp \
-v "$PWD":/tmp/cwltool \
quay.io/commonwl/cwltool_module /bin/sh -c \
diff --git a/conformance-test.sh b/conformance-test.sh
index 483ad984e..9506c28bb 100755
--- a/conformance-test.sh
+++ b/conformance-test.sh
@@ -41,6 +41,8 @@ set -x
# The directory where this script resides
SCRIPT_DIRECTORY="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+TMP_DIR=${TMP_DIR:-${SCRIPT_DIRECTORY}}
+
# Download archive from GitHub
if [[ "${VERSION}" = "v1.0" ]] ; then
REPO=common-workflow-language
@@ -49,38 +51,41 @@ else
REPO=cwl-$(echo "$VERSION" | sed 's/\(v[0-9]*\.\)\([0-9]*\).*/\1\2/')
fi
-if [ ! -d "${REPO}-${GIT_TARGET}" ] ; then
- if [ ! -f "${GIT_TARGET}.tar.gz" ] ; then
- wget "https://github.com/common-workflow-language/${REPO}/archive/${GIT_TARGET}.tar.gz"
+if [ ! -d "${TMP_DIR}/${REPO}-${GIT_TARGET}" ] ; then
+ if [ ! -f "${TMP_DIR}/${GIT_TARGET}.tar.gz" ] ; then
+ wget --directory-prefix "${TMP_DIR}" "https://github.com/common-workflow-language/${REPO}/archive/${GIT_TARGET}.tar.gz"
fi
- tar xzf "${GIT_TARGET}.tar.gz"
+ tar --directory "${TMP_DIR}" -x -f "${TMP_DIR}/${GIT_TARGET}.tar.gz"
fi
-if [ "${CONTAINER}" == "docker" ]; then
+if [ -v SKIP_PULL ] ; then
+ echo Skipping node container pull.
+elif [ "${CONTAINER}" == "docker" ]; then
docker pull docker.io/node:slim
-fi
-
-if [ "${CONTAINER}" == "podman" ]; then
+elif [ "${CONTAINER}" == "podman" ]; then
podman pull docker.io/node:slim
-fi
-
-if [ "${CONTAINER}" == "singularity" ]; then
+elif [ "${CONTAINER}" == "singularity" ]; then
export CWL_SINGULARITY_CACHE="$SCRIPT_DIRECTORY/sifcache"
mkdir --parents "${CWL_SINGULARITY_CACHE}"
fi
# Setup environment
-venv cwl-conformance-venv
-pip install -U setuptools wheel pip
-pip uninstall -y cwltool
-pip install "${SCRIPT_DIRECTORY}" -r"${SCRIPT_DIRECTORY}/requirements.txt"
-pip install 'cwltest>=2.3' pytest-cov pytest-xdist
+if [ -v SKIP_INSTALL ] ; then
+ echo 'Skip installing dependencies; cwltool & cwltest must already be installed'
+else
+ venv "${TMP_DIR}/cwl-conformance-venv"
+ pip install -U setuptools wheel pip
+ pip uninstall -y cwltool
+ pip install -r"${SCRIPT_DIRECTORY}/mypy-requirements.txt"
+ CWLTOOL_USE_MYPYC=1 MYPYPATH="${SCRIPT_DIRECTORY}/mypy-stubs" pip install "${SCRIPT_DIRECTORY}" -r"${SCRIPT_DIRECTORY}/requirements.txt"
+  pip install 'cwltest>=2.5' pytest-cov 'pytest-xdist>=3.2.0' psutil
+fi
# Set conformance test filename
if [[ "${VERSION}" = "v1.0" ]] ; then
- CONFORMANCE_TEST="${SCRIPT_DIRECTORY}/${REPO}-${GIT_TARGET}/${VERSION}/conformance_test_v1.0.yaml"
+ CONFORMANCE_TEST="${TMP_DIR}/${REPO}-${GIT_TARGET}/${VERSION}/conformance_test_v1.0.yaml"
else
- CONFORMANCE_TEST="${SCRIPT_DIRECTORY}/${REPO}-${GIT_TARGET}/conformance_tests.yaml"
+ CONFORMANCE_TEST="${TMP_DIR}/${REPO}-${GIT_TARGET}/conformance_tests.yaml"
fi
cp "${CONFORMANCE_TEST}" "${CONFORMANCE_TEST%".yaml"}.cwltest.yaml"
CONFORMANCE_TEST="${CONFORMANCE_TEST%".yaml"}.cwltest.yaml"
@@ -116,14 +121,18 @@ if (( "${#exclusions[*]}" > 0 )); then
fi
# Build command
-TEST_COMMAND="python -m pytest ${CONFORMANCE_TEST} -n auto -rs --junit-xml=${SCRIPT_DIRECTORY}/cwltool_conf_${VERSION}_${GIT_TARGET}_${CONTAINER}.xml -o junit_suite_name=cwltool_$(echo "${CWLTOOL_OPTIONS}" | tr "[:blank:]-" _)"
+TEST_COMMAND="python -m pytest ${CONFORMANCE_TEST} -n logical --dist worksteal -rs --junit-xml=${TMP_DIR}/cwltool_conf_${VERSION}_${GIT_TARGET}_${CONTAINER}.xml -o junit_suite_name=cwltool_$(echo "${CWLTOOL_OPTIONS}" | tr "[:blank:]-" _)"
if [[ -n "${EXCLUDE}" ]] ; then
TEST_COMMAND="${TEST_COMMAND} --cwl-exclude ${EXCLUDE}"
fi
-TEST_COMMAND="${TEST_COMMAND} --cov --cov-config ${SCRIPT_DIRECTORY}/.coveragerc --cov-report= ${PYTEST_EXTRA}"
+if [ -v SKIP_COV ] ; then
+ echo Skipping gathering of coverage information
+else
+ TEST_COMMAND="${TEST_COMMAND} --cov --cov-config ${SCRIPT_DIRECTORY}/.coveragerc --cov-report= ${PYTEST_EXTRA}"
+ # Clean up all old coverage data
+ find "${SCRIPT_DIRECTORY}" \( -type f -name .coverage -or -name '.coverage.*' -or -name coverage.xml \) -delete
+fi
-# Clean up all old coverage data
-find "${SCRIPT_DIRECTORY}" \( -type f -name .coverage -or -name '.coverage.*' -or -name coverage.xml \) -delete
if [ "$GIT_BRANCH" = "origin/main" ] && [[ "$VERSION" = "v1.0" ]] && [[ "$CONTAINER" = "docker" ]]
then
@@ -153,11 +162,11 @@ echo CWLTOOL_OPTIONS="${CWLTOOL_OPTIONS}"
# Run test
cp "${SCRIPT_DIRECTORY}/tests/cwl-conformance/cwltool-conftest.py" "$(dirname "${CONFORMANCE_TEST}")/conftest.py"
-bash -c "${TEST_COMMAND}"
+bash -c "cd ${TMP_DIR} && ${TEST_COMMAND}"
RETURN_CODE=$?
# Coverage report
-if [ "${RETURN_CODE}" -eq "0" ] ; then
+if [ ! -v SKIP_COV ] && [ "${RETURN_CODE}" -eq "0" ] ; then
coverage report
coverage xml
fi
@@ -172,7 +181,11 @@ then
fi
# Cleanup
-deactivate
+if [ -v SKIP_INSTALL ] ; then
+  echo Skipping venv cleanup
+else
+  deactivate
+fi
#rm -rf "${GIT_TARGET}.tar.gz" "${SCRIPT_DIRECTORY}/${REPO}-${GIT_TARGET}" "${SCRIPT_DIRECTORY}/cwl-conformance-venv"
# Exit
diff --git a/cwltool.Dockerfile b/cwltool.Dockerfile
index 0193b9f32..2b93b8d99 100644
--- a/cwltool.Dockerfile
+++ b/cwltool.Dockerfile
@@ -1,30 +1,31 @@
-FROM python:3.11-alpine as builder
+FROM python:3.12-alpine3.17 as builder
RUN apk add --no-cache git gcc python3-dev libxml2-dev libxslt-dev libc-dev linux-headers
WORKDIR /cwltool
COPY . .
-RUN CWLTOOL_USE_MYPYC=1 MYPYPATH=mypy-stubs pip wheel --no-binary schema-salad \
+RUN export SETUPTOOLS_SCM_PRETEND_VERSION_FOR_CWLTOOL=$(grep __version__ cwltool/_version.py | awk -F\' '{ print $2 }' | tr -d '\\n') ; \
+ CWLTOOL_USE_MYPYC=1 MYPYPATH=mypy-stubs pip wheel --no-binary schema-salad \
--wheel-dir=/wheels .[deps] # --verbose
RUN rm /wheels/schema_salad*
RUN pip install "black~=22.0"
-# galaxy-util 22.1.2 depends on packaging<22, but black 23.x needs packaging>22
+# galaxy-util 22.1.x depends on packaging<22, but black 23.x needs packaging>22
RUN SCHEMA_SALAD_USE_MYPYC=1 MYPYPATH=mypy-stubs pip wheel --no-binary schema-salad \
$(grep schema.salad requirements.txt) "black~=22.0" --wheel-dir=/wheels # --verbose
RUN pip install --force-reinstall --no-index --no-warn-script-location \
--root=/pythonroot/ /wheels/*.whl
# --force-reinstall to install our new mypyc compiled schema-salad package
-FROM python:3.11-alpine as module
+FROM python:3.12-alpine3.17 as module
LABEL maintainer peter.amstutz@curii.com
-RUN apk add --no-cache docker nodejs graphviz libxml2 libxslt
+RUN apk add --no-cache docker nodejs 'graphviz<8' libxml2 libxslt
COPY --from=builder /pythonroot/ /
-FROM python:3.11-alpine
+FROM python:3.12-alpine3.17
LABEL maintainer peter.amstutz@curii.com
-RUN apk add --no-cache docker nodejs graphviz libxml2 libxslt
+RUN apk add --no-cache docker nodejs 'graphviz<8' libxml2 libxslt
COPY --from=builder /pythonroot/ /
COPY cwltool-in-docker.sh /cwltool-in-docker.sh
diff --git a/cwltool.code-workspace b/cwltool.code-workspace
deleted file mode 100644
index 876a1499c..000000000
--- a/cwltool.code-workspace
+++ /dev/null
@@ -1,8 +0,0 @@
-{
- "folders": [
- {
- "path": "."
- }
- ],
- "settings": {}
-}
\ No newline at end of file
diff --git a/cwltool/argparser.py b/cwltool/argparser.py
index dbb8349b4..33d76cd04 100644
--- a/cwltool/argparser.py
+++ b/cwltool/argparser.py
@@ -396,6 +396,7 @@ def arg_parser() -> argparse.ArgumentParser:
volumegroup = parser.add_mutually_exclusive_group()
volumegroup.add_argument("--verbose", action="store_true", help="Default logging")
+ volumegroup.add_argument("--no-warnings", action="store_true", help="Only print errors.")
volumegroup.add_argument("--quiet", action="store_true", help="Only print warnings and errors.")
volumegroup.add_argument("--debug", action="store_true", help="Print even more logging")
diff --git a/cwltool/builder.py b/cwltool/builder.py
index 3d781db9b..6b561d998 100644
--- a/cwltool/builder.py
+++ b/cwltool/builder.py
@@ -1,4 +1,5 @@
"""Command line builder."""
+
import copy
import logging
import math
@@ -532,7 +533,7 @@ def addsf(
"Reference, a single format string, or a list of "
"format strings. But the list cannot contain CWL "
"Expressions or CWL Parameter References. List "
- f"entry number {index+1} contains the following "
+ f"entry number {index + 1} contains the following "
"unallowed CWL Parameter Reference or Expression: "
f"{entry!r}."
)
diff --git a/cwltool/checker.py b/cwltool/checker.py
index 7184c2666..c386409af 100644
--- a/cwltool/checker.py
+++ b/cwltool/checker.py
@@ -1,10 +1,12 @@
"""Static checking of CWL workflow connectivity."""
+
from collections import namedtuple
from typing import (
Any,
Dict,
Iterator,
List,
+ Literal,
MutableMapping,
MutableSequence,
Optional,
@@ -16,12 +18,11 @@
from schema_salad.exceptions import ValidationException
from schema_salad.sourceline import SourceLine, bullets, strip_dup_lineno
from schema_salad.utils import json_dumps
-from typing_extensions import Literal
from .errors import WorkflowException
from .loghandler import _logger
from .process import shortname
-from .utils import CWLObjectType, CWLOutputAtomType, CWLOutputType, SinkType, aslist
+from .utils import CWLObjectType, CWLOutputType, SinkType, aslist
def _get_type(tp):
@@ -90,8 +91,8 @@ def can_assign_src_to_sink(src: SinkType, sink: Optional[SinkType], strict: bool
return False
if src["type"] == "array" and sink["type"] == "array":
return can_assign_src_to_sink(
- cast(MutableSequence[CWLOutputAtomType], src["items"]),
- cast(MutableSequence[CWLOutputAtomType], sink["items"]),
+ cast(MutableSequence[CWLOutputType], src["items"]),
+ cast(MutableSequence[CWLOutputType], sink["items"]),
strict,
)
if src["type"] == "record" and sink["type"] == "record":
@@ -288,7 +289,9 @@ def static_checker(
)
+ "\n"
+ SourceLine(sink, "type").makeError(
- " with sink '%s' of type %s" % (shortname(sink["id"]), json_dumps(sink["type"]))
+ " with sink '{}' of type {}".format(
+ shortname(sink["id"]), json_dumps(sink["type"])
+ )
)
)
if extra_message is not None:
@@ -507,8 +510,7 @@ def get_step_id(field_id: str) -> str:
def is_conditional_step(param_to_step: Dict[str, CWLObjectType], parm_id: str) -> bool:
- source_step = param_to_step.get(parm_id)
- if source_step is not None:
+ if (source_step := param_to_step.get(parm_id)) is not None:
if source_step.get("when") is not None:
return True
return False
diff --git a/cwltool/command_line_tool.py b/cwltool/command_line_tool.py
index 53805b00f..5b2bea5b2 100644
--- a/cwltool/command_line_tool.py
+++ b/cwltool/command_line_tool.py
@@ -693,7 +693,7 @@ def _initialworkdir(self, j: JobBase, builder: Builder) -> None:
for i, t2 in enumerate(ls):
if not isinstance(t2, Mapping):
raise SourceLine(initialWorkdir, "listing", WorkflowException, debug).makeError(
- "Entry at index %s of listing is not a record, was %s" % (i, type(t2))
+ f"Entry at index {i} of listing is not a record, was {type(t2)}"
)
if "entry" not in t2:
@@ -715,7 +715,9 @@ def _initialworkdir(self, j: JobBase, builder: Builder) -> None:
if not isinstance(t2["entry"], Mapping):
raise SourceLine(initialWorkdir, "listing", WorkflowException, debug).makeError(
- "Entry at index %s of listing is not a record, was %s" % (i, type(t2["entry"]))
+ "Entry at index {} of listing is not a record, was {}".format(
+ i, type(t2["entry"])
+ )
)
if t2["entry"].get("class") not in ("File", "Directory"):
@@ -864,7 +866,7 @@ def calc_checksum(location: str) -> Optional[str]:
and "checksum" in e
and e["checksum"] != "sha1$hash"
):
- return cast(Optional[str], e["checksum"])
+ return cast(str, e["checksum"])
return None
def remove_prefix(s: str, prefix: str) -> str:
@@ -1454,10 +1456,18 @@ def collect_output(
continue
if isinstance(sfitem, str):
sfitem = {"path": pathprefix + sfitem}
- if not fs_access.exists(sfitem["path"]) and sf_required:
+ original_sfitem = copy.deepcopy(sfitem)
+ if (
+ not fs_access.exists(
+ cast(
+ str, cast(CWLObjectType, revmap(sfitem))["location"]
+ )
+ )
+ and sf_required
+ ):
raise WorkflowException(
"Missing required secondary file '%s'"
- % (sfitem["path"])
+ % (original_sfitem["path"])
)
if "path" in sfitem and "location" not in sfitem:
revmap(sfitem)
diff --git a/cwltool/context.py b/cwltool/context.py
index 2fd1f84b5..1e82ecc4a 100644
--- a/cwltool/context.py
+++ b/cwltool/context.py
@@ -1,4 +1,5 @@
"""Shared context objects that replace use of kwargs."""
+
import copy
import os
import shutil
@@ -12,6 +13,7 @@
Dict,
Iterable,
List,
+ Literal,
Optional,
TextIO,
Tuple,
@@ -22,7 +24,6 @@
from schema_salad.avro.schema import Names
from schema_salad.ref_resolver import Loader
from schema_salad.utils import FetcherCallableType
-from typing_extensions import Literal
from .mpi import MpiConfig
from .pathmapper import PathMapper
@@ -42,7 +43,7 @@
class ContextBase:
- """Shared kwargs based initilizer for :py:class:`RuntimeContext` and :py:class:`LoadingContext`."""
+ """Shared kwargs based initializer for :py:class:`RuntimeContext` and :py:class:`LoadingContext`."""
def __init__(self, kwargs: Optional[Dict[str, Any]] = None) -> None:
"""Initialize."""
@@ -197,6 +198,8 @@ def __init__(self, kwargs: Optional[Dict[str, Any]] = None) -> None:
self.mpi_config: MpiConfig = MpiConfig()
self.default_stdout: Optional[Union[IO[bytes], TextIO]] = None
self.default_stderr: Optional[Union[IO[bytes], TextIO]] = None
+ self.validate_only: bool = False
+ self.validate_stdout: Optional[Union[IO[bytes], TextIO, IO[str]]] = None
super().__init__(kwargs)
if self.tmp_outdir_prefix == "":
self.tmp_outdir_prefix = self.tmpdir_prefix
diff --git a/cwltool/cwlprov/__init__.py b/cwltool/cwlprov/__init__.py
index 8ffcff4c3..7a1c0093c 100644
--- a/cwltool/cwlprov/__init__.py
+++ b/cwltool/cwlprov/__init__.py
@@ -6,9 +6,7 @@
import re
import uuid
from getpass import getuser
-from typing import IO, Any, Dict, List, Optional, Tuple, Union
-
-from typing_extensions import TypedDict
+from typing import IO, Any, Callable, Dict, List, Optional, Tuple, TypedDict, Union
from cwltool.cwlprov.provenance_constants import Hasher
@@ -116,24 +114,26 @@ def _valid_orcid(orcid: Optional[str]) -> str:
"oa:motivatedBy": Dict[str, str],
},
)
-Aggregate = TypedDict(
- "Aggregate",
- {
- "uri": Optional[str],
- "bundledAs": Optional[Dict[str, Any]],
- "mediatype": Optional[str],
- "conformsTo": Optional[Union[str, List[str]]],
- "createdOn": Optional[str],
- "createdBy": Optional[Dict[str, str]],
- },
- total=False,
-)
+
+
+class Aggregate(TypedDict, total=False):
+ """RO Aggregate class."""
+
+ uri: Optional[str]
+ bundledAs: Optional[Dict[str, Any]]
+ mediatype: Optional[str]
+ conformsTo: Optional[Union[str, List[str]]]
+ createdOn: Optional[str]
+ createdBy: Optional[Dict[str, str]]
+
+
# Aggregate.bundledAs is actually type Aggregate, but cyclic definitions are not supported
-AuthoredBy = TypedDict(
- "AuthoredBy",
- {"orcid": Optional[str], "name": Optional[str], "uri": Optional[str]},
- total=False,
-)
+class AuthoredBy(TypedDict, total=False):
+ """RO AuthoredBy class."""
+
+ orcid: Optional[str]
+ name: Optional[str]
+ uri: Optional[str]
def checksum_copy(
diff --git a/cwltool/cwlprov/provenance_profile.py b/cwltool/cwlprov/provenance_profile.py
index 05888487b..39f9c4d86 100644
--- a/cwltool/cwlprov/provenance_profile.py
+++ b/cwltool/cwlprov/provenance_profile.py
@@ -29,7 +29,7 @@
from ..loghandler import _logger
from ..process import Process, shortname
from ..stdfsaccess import StdFsAccess
-from ..utils import CWLObjectType, JobsType, posix_path, versionstring
+from ..utils import CWLObjectType, JobsType, get_listing, posix_path, versionstring
from ..workflow_job import WorkflowJob
from . import provenance_constants
from .provenance_constants import (
@@ -244,7 +244,6 @@ def evaluate(
# record provenance of workflow executions
self.prospective_prov(job)
customised_job = copy_job_order(job, job_order_object)
- # Note to self: Listing goes ok here
self.used_artefacts(customised_job, self.workflow_run_uri)
def record_process_start(
@@ -414,9 +413,11 @@ def declare_directory(self, value: CWLObjectType) -> ProvEntity:
# FIXME: .listing might not be populated yet - hopefully
# a later call to this method will sort that
is_empty = True
-
- # if "listing" not in value:
- # get_listing(self.fsaccess, value)
+
+ # get loadlisting, and load the listing if not no_listing, recursively if deep_listing
+ ll = value.get("loadListing")
+ if ll and ll != "no_listing":
+ get_listing(self.fsaccess, value, (ll == "deep_listing"))
for entry in cast(MutableSequence[CWLObjectType], value.get("listing", [])):
is_empty = False
# Declare child-artifacts
@@ -505,7 +506,7 @@ def declare_artefact(self, value: Any) -> ProvEntity:
self.research_object.add_uri(entity.identifier.uri)
return entity
- if isinstance(value, (str, str)):
+ if isinstance(value, str):
(entity, _) = self.declare_string(value)
return entity
diff --git a/cwltool/cwlprov/ro.py b/cwltool/cwlprov/ro.py
index 31f534802..20afca67b 100644
--- a/cwltool/cwlprov/ro.py
+++ b/cwltool/cwlprov/ro.py
@@ -5,6 +5,7 @@
import os
import shutil
import tempfile
+import urllib
import uuid
from pathlib import Path, PurePosixPath
from typing import (
@@ -455,7 +456,7 @@ def generate_snapshot(self, prov_dep: CWLObjectType) -> None:
self.self_check()
for key, value in prov_dep.items():
if key == "location" and cast(str, value).split("/")[-1]:
- location = cast(str, value)
+ location = urllib.parse.unquote(cast(str, value))
filename = location.split("/")[-1]
path = os.path.join(self.folder, SNAPSHOT, filename)
filepath = ""
diff --git a/cwltool/cwlrdf.py b/cwltool/cwlrdf.py
index d91552690..7dcf85cbc 100644
--- a/cwltool/cwlrdf.py
+++ b/cwltool/cwlrdf.py
@@ -51,7 +51,7 @@ def dot_with_parameters(g: Graph, stdout: Union[TextIO, StreamWriter]) -> None:
for step, run, _ in qres:
stdout.write(
- '"%s" [label="%s"]\n' % (lastpart(step), f"{lastpart(step)} ({lastpart(run)})")
+ '"{}" [label="{}"]\n'.format(lastpart(step), f"{lastpart(step)} ({lastpart(run)})")
)
qres = cast(
@@ -170,7 +170,7 @@ def dot_without_parameters(g: Graph, stdout: Union[TextIO, StreamWriter]) -> Non
if str(runtype) != "https://w3id.org/cwl/cwl#Workflow":
stdout.write(
- '"%s" [label="%s"]\n' % (dotname[step], urllib.parse.urldefrag(str(step))[1])
+ f'"{dotname[step]}" [label="{urllib.parse.urldefrag(str(step))[1]}"]\n' # noqa: B907
)
if currentwf is not None:
diff --git a/cwltool/cwlviewer.py b/cwltool/cwlviewer.py
index 47a404a25..e544a568e 100644
--- a/cwltool/cwlviewer.py
+++ b/cwltool/cwlviewer.py
@@ -1,4 +1,5 @@
"""Visualize a CWL workflow."""
+
from pathlib import Path
from typing import Iterator, List, cast
from urllib.parse import urlparse
diff --git a/cwltool/docker.py b/cwltool/docker.py
index bd9637917..a1b9c8a37 100644
--- a/cwltool/docker.py
+++ b/cwltool/docker.py
@@ -2,9 +2,9 @@
import csv
import datetime
+import json
import math
import os
-import re
import shutil
import subprocess # nosec
import sys
@@ -47,7 +47,7 @@ def _get_docker_machine_mounts() -> List[str]:
"-t",
"vboxsf",
],
- universal_newlines=True,
+ text=True,
).splitlines()
]
return __docker_machine_mounts
@@ -113,35 +113,17 @@ def get_image(
if docker_requirement["dockerImageId"] in _IMAGES:
return True
- for line in (
- subprocess.check_output([self.docker_exec, "images", "--no-trunc", "--all"]) # nosec
- .decode("utf-8")
- .splitlines()
- ):
+ if (docker_image_id := docker_requirement.get("dockerImageId")) is not None:
try:
- match = re.match(r"^([^ ]+)\s+([^ ]+)\s+([^ ]+)", line)
- split = docker_requirement["dockerImageId"].split(":")
- if len(split) == 1:
- split.append("latest")
- elif len(split) == 2:
- # if split[1] doesn't match valid tag names, it is a part of repository
- if not re.match(r"[\w][\w.-]{0,127}", split[1]):
- split[0] = split[0] + ":" + split[1]
- split[1] = "latest"
- elif len(split) == 3:
- if re.match(r"[\w][\w.-]{0,127}", split[2]):
- split[0] = split[0] + ":" + split[1]
- split[1] = split[2]
- del split[2]
-
- # check for repository:tag match or image id match
- if match and (
- (split[0] == match.group(1) and split[1] == match.group(2))
- or docker_requirement["dockerImageId"] == match.group(3)
- ):
- found = True
- break
- except ValueError:
+ manifest = json.loads(
+ subprocess.check_output(
+ [self.docker_exec, "inspect", docker_image_id]
+ ).decode( # nosec
+ "utf-8"
+ )
+ )
+ found = manifest is not None
+ except (OSError, subprocess.CalledProcessError, UnicodeError):
pass
if (force_pull or not found) and pull_image:
@@ -225,7 +207,13 @@ def get_from_requirements(
raise WorkflowException("Docker image %s not found" % r["dockerImageId"])
@staticmethod
- def append_volume(runtime: List[str], source: str, target: str, writable: bool = False) -> None:
+ def append_volume(
+ runtime: List[str],
+ source: str,
+ target: str,
+ writable: bool = False,
+ skip_mkdirs: bool = False,
+ ) -> None:
"""Add binding arguments to the runtime list."""
options = [
"type=bind",
@@ -239,7 +227,7 @@ def append_volume(runtime: List[str], source: str, target: str, writable: bool =
mount_arg = output.getvalue().strip()
runtime.append(f"--mount={mount_arg}")
# Unlike "--volume", "--mount" will fail if the volume doesn't already exist.
- if not os.path.exists(source):
+ if (not skip_mkdirs) and (not os.path.exists(source)):
os.makedirs(source)
def add_file_or_directory_volume(
@@ -441,7 +429,10 @@ def create_runtime(
"assurance.",
self.name,
)
-
+ shm_size_od, shm_bool = self.builder.get_requirement("http://commonwl.org/cwltool#ShmSize")
+ if shm_bool:
+ shm_size = cast(CWLObjectType, shm_size_od)["shmSize"]
+ runtime.append(f"--shm-size={shm_size}")
return runtime, cidfile_path
diff --git a/cwltool/docker_id.py b/cwltool/docker_id.py
index 94174f505..bb436b2cb 100644
--- a/cwltool/docker_id.py
+++ b/cwltool/docker_id.py
@@ -30,9 +30,7 @@ def check_output_and_strip(cmd: List[str]) -> Optional[str]:
:return: Stripped string output of the command, or ``None`` if error
"""
try:
- result = subprocess.check_output( # nosec
- cmd, stderr=subprocess.STDOUT, universal_newlines=True
- )
+ result = subprocess.check_output(cmd, stderr=subprocess.STDOUT, text=True) # nosec
return result.strip()
except (OSError, subprocess.CalledProcessError, TypeError, AttributeError):
# OSError is raised if command doesn't exist
diff --git a/cwltool/executors.py b/cwltool/executors.py
index 2585daad5..bfc87f9c7 100644
--- a/cwltool/executors.py
+++ b/cwltool/executors.py
@@ -1,4 +1,5 @@
"""Single and multi-threaded executors."""
+
import datetime
import functools
import logging
@@ -26,6 +27,7 @@
from .command_line_tool import CallbackJob, ExpressionJob
from .context import RuntimeContext, getdefault
+from .cuda import cuda_version_and_device_count
from .cwlprov.provenance_profile import ProvenanceProfile
from .errors import WorkflowException
from .job import JobBase
@@ -142,6 +144,8 @@ def check_for_abstract_op(tool: CWLObjectType) -> None:
process.requirements.append(req)
self.run_jobs(process, job_order_object, logger, runtime_context)
+ if runtime_context.validate_only is True:
+ return (None, "ValidationSuccess")
if self.final_output and self.final_output[0] is not None and finaloutdir is not None:
self.final_output[0] = relocateOutputs(
@@ -172,9 +176,7 @@ def check_for_abstract_op(tool: CWLObjectType) -> None:
):
process_run_id: Optional[str] = None
name = "primary"
- process.parent_wf.generate_output_prov(
- self.final_output[0], process_run_id, name
- ) # Note to self... # , "generate_output_prov")
+ process.parent_wf.generate_output_prov(self.final_output[0], process_run_id, name)
process.parent_wf.document.wasEndedBy(
process.parent_wf.workflow_run_uri,
None,
@@ -239,6 +241,16 @@ def run_jobs(
process_run_id = prov_obj.record_process_start(process, job)
runtime_context = runtime_context.copy()
runtime_context.process_run_id = process_run_id
+ if runtime_context.validate_only is True:
+ if isinstance(job, WorkflowJob):
+ name = job.tool.lc.filename
+ else:
+ name = getattr(job, "name", str(job))
+ print(
+ f"{name} is valid CWL. No errors detected in the inputs.",
+ file=runtime_context.validate_stdout,
+ )
+ return
job.run(runtime_context)
else:
logger.error("Workflow cannot make any more progress.")
@@ -271,8 +283,10 @@ def __init__(self) -> None:
self.max_ram = int(psutil.virtual_memory().available / 2**20)
self.max_cores = float(psutil.cpu_count())
+ self.max_cuda = cuda_version_and_device_count()[1]
self.allocated_ram = float(0)
self.allocated_cores = float(0)
+ self.allocated_cuda: int = 0
def select_resources(
self, request: Dict[str, Union[int, float]], runtime_context: RuntimeContext
@@ -280,7 +294,11 @@ def select_resources(
"""Naïve check for available cpu cores and memory."""
result: Dict[str, Union[int, float]] = {}
maxrsc = {"cores": self.max_cores, "ram": self.max_ram}
- for rsc in ("cores", "ram"):
+ resources_types = {"cores", "ram"}
+ if "cudaDeviceCountMin" in request or "cudaDeviceCountMax" in request:
+ maxrsc["cudaDeviceCount"] = self.max_cuda
+ resources_types.add("cudaDeviceCount")
+ for rsc in resources_types:
rsc_min = request[rsc + "Min"]
if rsc_min > maxrsc[rsc]:
raise WorkflowException(
@@ -295,9 +313,6 @@ def select_resources(
result["tmpdirSize"] = math.ceil(request["tmpdirMin"])
result["outdirSize"] = math.ceil(request["outdirMin"])
- if "cudaDeviceCount" in request:
- result["cudaDeviceCount"] = request["cudaDeviceCount"]
-
return result
def _runner(
@@ -328,6 +343,10 @@ def _runner(
self.allocated_ram -= ram
cores = job.builder.resources["cores"]
self.allocated_cores -= cores
+ cudaDevices: int = cast(
+ int, job.builder.resources.get("cudaDeviceCount", 0)
+ )
+ self.allocated_cuda -= cudaDevices
runtime_context.workflow_eval_lock.notify_all()
def run_job(
@@ -351,16 +370,21 @@ def run_job(
if isinstance(job, JobBase):
ram = job.builder.resources["ram"]
cores = job.builder.resources["cores"]
- if ram > self.max_ram or cores > self.max_cores:
+ cudaDevices = cast(int, job.builder.resources.get("cudaDeviceCount", 0))
+ if ram > self.max_ram or cores > self.max_cores or cudaDevices > self.max_cuda:
_logger.error(
'Job "%s" cannot be run, requests more resources (%s) '
- "than available on this host (max ram %d, max cores %d",
+ "than available on this host (already allocated ram is %d, "
+ "allocated cores is %d, allocated CUDA is %d, "
+ "max ram %d, max cores %d, max CUDA %d).",
job.name,
job.builder.resources,
self.allocated_ram,
self.allocated_cores,
+ self.allocated_cuda,
self.max_ram,
self.max_cores,
+ self.max_cuda,
)
self.pending_jobs.remove(job)
return
@@ -368,17 +392,21 @@ def run_job(
if (
self.allocated_ram + ram > self.max_ram
or self.allocated_cores + cores > self.max_cores
+ or self.allocated_cuda + cudaDevices > self.max_cuda
):
_logger.debug(
'Job "%s" cannot run yet, resources (%s) are not '
"available (already allocated ram is %d, allocated cores is %d, "
- "max ram %d, max cores %d",
+ "allocated CUDA devices is %d, "
+ "max ram %d, max cores %d, max CUDA %d).",
job.name,
job.builder.resources,
self.allocated_ram,
self.allocated_cores,
+ self.allocated_cuda,
self.max_ram,
self.max_cores,
+ self.max_cuda,
)
n += 1
continue
@@ -388,6 +416,8 @@ def run_job(
self.allocated_ram += ram
cores = job.builder.resources["cores"]
self.allocated_cores += cores
+ cuda = cast(int, job.builder.resources.get("cudaDevices", 0))
+ self.allocated_cuda += cuda
self.taskqueue.add(
functools.partial(self._runner, job, runtime_context, TMPDIR_LOCK),
runtime_context.workflow_eval_lock,
diff --git a/cwltool/extensions-v1.1.yml b/cwltool/extensions-v1.1.yml
index 603c40f05..0f53bea57 100644
--- a/cwltool/extensions-v1.1.yml
+++ b/cwltool/extensions-v1.1.yml
@@ -1,6 +1,7 @@
$base: http://commonwl.org/cwltool#
$namespaces:
cwl: "https://w3id.org/cwl/cwl#"
+ cwltool: "http://commonwl.org/cwltool#"
$graph:
- $import: https://w3id.org/cwl/CommonWorkflowLanguage.yml
@@ -119,3 +120,21 @@ $graph:
doc: |
Maximum number of GPU devices to request. If not specified,
same as `cudaDeviceCountMin`.
+- name: ShmSize
+ type: record
+ extends: cwl:ProcessRequirement
+ inVocab: false
+ fields:
+ class:
+ type: string
+ doc: 'cwltool:ShmSize'
+ jsonldPredicate:
+ "_id": "@type"
+ "_type": "@vocab"
+ shmSize:
+ type: string
+ doc: |
+        Size of /dev/shm. The format is `<number><unit>`. `<number>` must be greater
+        than 0. Unit is optional and can be `b` (bytes), `k` (kilobytes), `m`
+        (megabytes), or `g` (gigabytes). If you omit the unit, the default is
+        bytes. If you omit the size entirely, the value is `64m`.
diff --git a/cwltool/extensions-v1.2.yml b/cwltool/extensions-v1.2.yml
index d7fef1160..c39b15d07 100644
--- a/cwltool/extensions-v1.2.yml
+++ b/cwltool/extensions-v1.2.yml
@@ -1,6 +1,7 @@
$base: http://commonwl.org/cwltool#
$namespaces:
cwl: "https://w3id.org/cwl/cwl#"
+ cwltool: "http://commonwl.org/cwltool#"
$graph:
- $import: https://w3id.org/cwl/CommonWorkflowLanguage.yml
@@ -239,4 +240,21 @@ $graph:
- Specify the desired method of dealing with loop outputs
- Default. Propagates only the last computed element to the subsequent steps when the loop terminates.
- Propagates a single array with all output values to the subsequent steps when the loop terminates.
-
+- name: ShmSize
+ type: record
+ extends: cwl:ProcessRequirement
+ inVocab: false
+ fields:
+ class:
+ type: string
+ doc: 'cwltool:ShmSize'
+ jsonldPredicate:
+ "_id": "@type"
+ "_type": "@vocab"
+ shmSize:
+ type: string
+ doc: |
+        Size of /dev/shm. The format is `<number><unit>`. `<number>` must be greater
+        than 0. Unit is optional and can be `b` (bytes), `k` (kilobytes), `m`
+        (megabytes), or `g` (gigabytes). If you omit the unit, the default is
+        bytes. If you omit the size entirely, the value is `64m`.
diff --git a/cwltool/extensions.yml b/cwltool/extensions.yml
index 008b1bd3e..4dca0ccd6 100644
--- a/cwltool/extensions.yml
+++ b/cwltool/extensions.yml
@@ -1,6 +1,7 @@
$base: http://commonwl.org/cwltool#
$namespaces:
cwl: "https://w3id.org/cwl/cwl#"
+ cwltool: "http://commonwl.org/cwltool#"
$graph:
- $import: https://w3id.org/cwl/CommonWorkflowLanguage.yml
@@ -229,3 +230,21 @@ $graph:
doc: |
Maximum number of GPU devices to request. If not specified,
same as `cudaDeviceCountMin`.
+- name: ShmSize
+ type: record
+ extends: cwl:ProcessRequirement
+ inVocab: false
+ fields:
+ class:
+ type: string
+ doc: 'cwltool:ShmSize'
+ jsonldPredicate:
+ "_id": "@type"
+ "_type": "@vocab"
+ shmSize:
+ type: string
+ doc: |
+        Size of /dev/shm. The format is `<number><unit>`. `<number>` must be greater
+        than 0. Unit is optional and can be `b` (bytes), `k` (kilobytes), `m`
+        (megabytes), or `g` (gigabytes). If you omit the unit, the default is
+        bytes. If you omit the size entirely, the value is `64m`.
diff --git a/cwltool/job.py b/cwltool/job.py
index c76f588fd..fe7073856 100644
--- a/cwltool/job.py
+++ b/cwltool/job.py
@@ -66,6 +66,11 @@
from .cwlprov.provenance_profile import (
ProvenanceProfile, # pylint: disable=unused-import
)
+
+ CollectOutputsType = Union[
+ Callable[[str, int], CWLObjectType], functools.partial[CWLObjectType]
+ ]
+
needs_shell_quoting_re = re.compile(r"""(^$|[\s|&;()<>\'"$@])""")
FORCE_SHELLED_POPEN = os.getenv("CWLTOOL_FORCE_SHELL_POPEN", "0") == "1"
@@ -112,9 +117,6 @@ def neverquote(string: str, pos: int = 0, endpos: int = 0) -> Optional[Match[str
return None
-CollectOutputsType = Union[Callable[[str, int], CWLObjectType], functools.partial]
-
-
class JobBase(HasReqsHints, metaclass=ABCMeta):
def __init__(
self,
@@ -144,7 +146,7 @@ def __init__(
self.generatemapper: Optional[PathMapper] = None
# set in CommandLineTool.job(i)
- self.collect_outputs = cast(CollectOutputsType, None)
+ self.collect_outputs = cast("CollectOutputsType", None)
self.output_callback: Optional[OutputCallbackType] = None
self.outdir = ""
self.tmpdir = ""
@@ -190,7 +192,7 @@ def is_streamable(file: str) -> bool:
return False
for inp in self.joborder.values():
if isinstance(inp, dict) and inp.get("location", None) == file:
- return inp.get("streamable", False)
+ return cast(bool, inp.get("streamable", False))
return False
for knownfile in self.pathmapper.files():
@@ -514,17 +516,27 @@ def process_monitor(self, sproc: "subprocess.Popen[str]") -> None:
# Value must be list rather than integer to utilise pass-by-reference in python
memory_usage: MutableSequence[Optional[int]] = [None]
+ mem_tm: "Optional[Timer]" = None
+
def get_tree_mem_usage(memory_usage: MutableSequence[Optional[int]]) -> None:
- children = monitor.children()
+ nonlocal mem_tm
try:
- rss = monitor.memory_info().rss
- while len(children):
- rss += sum(process.memory_info().rss for process in children)
- children = list(itertools.chain(*(process.children() for process in children)))
- if memory_usage[0] is None or rss > memory_usage[0]:
- memory_usage[0] = rss
+ with monitor.oneshot():
+ children = monitor.children()
+ rss = monitor.memory_info().rss
+ while len(children):
+ rss += sum(process.memory_info().rss for process in children)
+ children = list(
+ itertools.chain(*(process.children() for process in children))
+ )
+ if memory_usage[0] is None or rss > memory_usage[0]:
+ memory_usage[0] = rss
+ mem_tm = Timer(interval=1, function=get_tree_mem_usage, args=(memory_usage,))
+ mem_tm.daemon = True
+ mem_tm.start()
except psutil.NoSuchProcess:
- mem_tm.cancel()
+ if mem_tm is not None:
+ mem_tm.cancel()
mem_tm = Timer(interval=1, function=get_tree_mem_usage, args=(memory_usage,))
mem_tm.daemon = True
@@ -586,8 +598,9 @@ def _required_env(self) -> Dict[str, str]:
env["HOME"] = self.outdir
env["TMPDIR"] = self.tmpdir
env["PATH"] = os.environ["PATH"]
- if "SYSTEMROOT" in os.environ:
- env["SYSTEMROOT"] = os.environ["SYSTEMROOT"]
+ for extra in ("SYSTEMROOT", "QEMU_LD_PREFIX"):
+ if extra in os.environ:
+ env[extra] = os.environ[extra]
return env
@@ -751,16 +764,6 @@ def run(
img_id = str(docker_req["dockerImageId"])
elif "dockerPull" in docker_req:
img_id = str(docker_req["dockerPull"])
- cmd = [user_space_docker_cmd, "pull", img_id]
- _logger.info(str(cmd))
- try:
- subprocess.check_call(cmd, stdout=sys.stderr) # nosec
- except OSError as exc:
- raise SourceLine(docker_req, None, WorkflowException, debug).makeError(
- f"Either Docker container {img_id} is not available with "
- f"user space docker implementation {user_space_docker_cmd} "
- f" or {user_space_docker_cmd} is missing or broken."
- ) from exc
else:
raise SourceLine(docker_req, None, WorkflowException, debug).makeError(
"Docker image must be specified as 'dockerImageId' or "
@@ -813,7 +816,7 @@ def run(
_logger.debug("%s error", container, exc_info=True)
if docker_is_req:
raise UnsupportedRequirement(
- "{} is required to run this tool: {}".format(container, str(err))
+ f"{container} is required to run this tool: {str(err)}"
) from err
else:
raise WorkflowException(
@@ -860,13 +863,17 @@ def docker_monitor(
cid: Optional[str] = None
while cid is None:
time.sleep(1)
+ # This is needed to avoid a race condition where the job
+ # was so fast that it already finished when it arrives here
+ if process.returncode is None:
+ process.poll()
if process.returncode is not None:
if cleanup_cidfile:
try:
os.remove(cidfile)
except OSError as exc:
_logger.warning("Ignored error cleaning up %s cidfile: %s", docker_exe, exc)
- return
+ return
try:
with open(cidfile) as cidhandle:
cid = cidhandle.readline().strip()
@@ -1038,6 +1045,26 @@ def terminate(): # type: () -> None
if sproc.stdin is not None:
sproc.stdin.close()
+ tm = None
+ if timelimit is not None and timelimit > 0:
+
+ def terminate(): # type: () -> None
+ try:
+ _logger.warning(
+ "[job %s] exceeded time limit of %d seconds and will be terminated",
+ name,
+ timelimit,
+ )
+ sproc.terminate()
+ except OSError:
+ pass
+
+ tm = Timer(timelimit, terminate)
+ tm.daemon = True
+ tm.start()
+ if monitor_function:
+ monitor_function(sproc)
+
rcode = sproc.wait()
return rcode
diff --git a/cwltool/load_tool.py b/cwltool/load_tool.py
index 622f9c761..d6352f918 100644
--- a/cwltool/load_tool.py
+++ b/cwltool/load_tool.py
@@ -43,7 +43,7 @@
from .update import ALLUPDATES
from .utils import CWLObjectType, ResolverType, visit_class
-jobloaderctx: ContextType = {
+docloaderctx: ContextType = {
"cwl": "https://w3id.org/cwl/cwl#",
"cwltool": "http://commonwl.org/cwltool#",
"path": {"@type": "@id"},
@@ -51,6 +51,15 @@
"id": "@id",
}
+jobloader_id_name = "__id"
+jobloaderctx: ContextType = {
+ "cwl": "https://w3id.org/cwl/cwl#",
+ "cwltool": "http://commonwl.org/cwltool#",
+ "path": {"@type": "@id"},
+ "location": {"@type": "@id"},
+ jobloader_id_name: "@id",
+}
+
overrides_ctx: ContextType = {
"overrideTarget": {"@type": "@id"},
@@ -72,7 +81,7 @@ def default_loader(
doc_cache: bool = True,
) -> Loader:
return Loader(
- jobloaderctx,
+ docloaderctx,
fetcher_constructor=fetcher_constructor,
allow_attachments=lambda r: enable_dev,
doc_cache=doc_cache,
@@ -379,9 +388,7 @@ def resolve_and_validate_document(
loadingContext = loadingContext.copy()
if not isinstance(workflowobj, MutableMapping):
- raise ValueError(
- "workflowjobj must be a dict, got '{}': {}".format(type(workflowobj), workflowobj)
- )
+ raise ValueError(f"workflowjobj must be a dict, got {type(workflowobj)!r}: {workflowobj}")
jobobj = None
if "cwl:tool" in workflowobj:
diff --git a/cwltool/loghandler.py b/cwltool/loghandler.py
index c1f451991..76daa8be9 100644
--- a/cwltool/loghandler.py
+++ b/cwltool/loghandler.py
@@ -1,4 +1,5 @@
"""Shared logger for cwltool."""
+
import logging
import coloredlogs
@@ -11,6 +12,7 @@
def configure_logging(
stderr_handler: logging.Handler,
+ no_warnings: bool,
quiet: bool,
debug: bool,
enable_color: bool,
@@ -21,6 +23,12 @@ def configure_logging(
rdflib_logger = logging.getLogger("rdflib.term")
rdflib_logger.addHandler(stderr_handler)
rdflib_logger.setLevel(logging.ERROR)
+ deps_logger = logging.getLogger("galaxy.tool_util.deps")
+ deps_logger.addHandler(stderr_handler)
+ ss_logger = logging.getLogger("salad")
+ ss_logger.addHandler(stderr_handler)
+ if no_warnings:
+ stderr_handler.setLevel(logging.ERROR)
if quiet:
# Silence STDERR, not an eventual provenance log file
stderr_handler.setLevel(logging.WARN)
@@ -29,6 +37,7 @@ def configure_logging(
base_logger.setLevel(logging.DEBUG)
stderr_handler.setLevel(logging.DEBUG)
rdflib_logger.setLevel(logging.DEBUG)
+ deps_logger.setLevel(logging.DEBUG)
fmtclass = coloredlogs.ColoredFormatter if enable_color else logging.Formatter
formatter = fmtclass("%(levelname)s %(message)s")
if timestamps:
diff --git a/cwltool/main.py b/cwltool/main.py
index 999e9f743..41d258f74 100755
--- a/cwltool/main.py
+++ b/cwltool/main.py
@@ -34,8 +34,8 @@
import argcomplete
import coloredlogs
+import requests
import ruamel.yaml
-from importlib_resources import files
from ruamel.yaml.comments import CommentedMap, CommentedSeq
from ruamel.yaml.main import YAML
from schema_salad.exceptions import ValidationException
@@ -71,6 +71,7 @@
from .load_tool import (
default_loader,
fetch_document,
+ jobloader_id_name,
jobloaderctx,
load_overrides,
make_tool,
@@ -105,10 +106,10 @@
from .utils import (
DEFAULT_TMP_PREFIX,
CWLObjectType,
- CWLOutputAtomType,
CWLOutputType,
HasReqsHints,
adjustDirObjs,
+ files,
normalizeFilesDirs,
processes_to_kill,
trim_listing,
@@ -174,6 +175,14 @@ def _signal_handler(signum: int, _: Any) -> None:
sys.exit(signum)
+def append_word_to_default_user_agent(word: str) -> None:
+ """Append the specified word to the requests http user agent string if it's not already there."""
+ original_function = requests.utils.default_user_agent
+ suffix = f" {word}"
+ if not original_function().endswith(suffix):
+ requests.utils.default_user_agent = lambda *args: original_function(*args) + suffix
+
+
def generate_example_input(
inptype: Optional[CWLOutputType],
default: Optional[CWLOutputType],
@@ -289,7 +298,7 @@ def realize_input_schema(
_, input_type_name = entry["type"].split("#")
if input_type_name in schema_defs:
entry["type"] = cast(
- CWLOutputAtomType,
+ CWLOutputType,
realize_input_schema(
cast(
MutableSequence[Union[str, CWLObjectType]],
@@ -300,7 +309,7 @@ def realize_input_schema(
)
if isinstance(entry["type"], MutableSequence):
entry["type"] = cast(
- CWLOutputAtomType,
+ CWLOutputType,
realize_input_schema(
cast(MutableSequence[Union[str, CWLObjectType]], entry["type"]),
schema_defs,
@@ -308,13 +317,13 @@ def realize_input_schema(
)
if isinstance(entry["type"], Mapping):
entry["type"] = cast(
- CWLOutputAtomType,
+ CWLOutputType,
realize_input_schema([cast(CWLObjectType, entry["type"])], schema_defs),
)
if entry["type"] == "array":
items = entry["items"] if not isinstance(entry["items"], str) else [entry["items"]]
entry["items"] = cast(
- CWLOutputAtomType,
+ CWLOutputType,
realize_input_schema(
cast(MutableSequence[Union[str, CWLObjectType]], items),
schema_defs,
@@ -322,7 +331,7 @@ def realize_input_schema(
)
if entry["type"] == "record":
entry["fields"] = cast(
- CWLOutputAtomType,
+ CWLOutputType,
realize_input_schema(
cast(MutableSequence[Union[str, CWLObjectType]], entry["fields"]),
schema_defs,
@@ -447,7 +456,7 @@ def init_job_order(
_logger.exception("Failed to resolv job_order: %s", cmd_line["job_order"])
exit(1)
else:
- job_order_object = {"id": args.workflow}
+ job_order_object = {jobloader_id_name: args.workflow}
del cmd_line["job_order"]
@@ -507,7 +516,7 @@ def expand_formats(p: CWLObjectType) -> None:
process.inputs_record_schema, job_order_object, discover_secondaryFiles=True
)
basedir: Optional[str] = None
- uri = cast(str, job_order_object["id"])
+ uri = cast(str, job_order_object[jobloader_id_name])
if uri == args.workflow:
basedir = os.path.dirname(uri)
uri = ""
@@ -530,8 +539,8 @@ def expand_formats(p: CWLObjectType) -> None:
if "cwl:tool" in job_order_object:
del job_order_object["cwl:tool"]
- if "id" in job_order_object:
- del job_order_object["id"]
+ if jobloader_id_name in job_order_object:
+ del job_order_object[jobloader_id_name]
return job_order_object
@@ -612,7 +621,7 @@ def loadref(base: str, uri: str) -> Union[CommentedMap, CommentedSeq, str, None]
nestdirs=nestdirs,
)
if sfs is not None:
- deps["secondaryFiles"] = cast(MutableSequence[CWLOutputAtomType], mergedirs(sfs))
+ deps["secondaryFiles"] = cast(MutableSequence[CWLOutputType], mergedirs(sfs))
return deps
@@ -683,8 +692,8 @@ def formatTime(self, record: logging.LogRecord, datefmt: Optional[str] = None) -
def setup_provenance(
args: argparse.Namespace,
- argsl: List[str],
runtimeContext: RuntimeContext,
+ argsl: Optional[List[str]] = None,
) -> Tuple[ProvOut, "logging.StreamHandler[ProvOut]"]:
if not args.compute_checksum:
_logger.error("--provenance incompatible with --no-compute-checksum")
@@ -982,6 +991,12 @@ def main(
workflowobj = None
prov_log_handler: Optional[logging.StreamHandler[ProvOut]] = None
global docker_exe
+
+ user_agent = "cwltool"
+ if user_agent not in (progname := os.path.basename(sys.argv[0])):
+ user_agent += f" {progname}" # append the real program name as well
+ append_word_to_default_user_agent(user_agent)
+
try:
if args is None:
if argsl is None:
@@ -1016,6 +1031,7 @@ def main(
configure_logging(
stderr_handler,
+ args.no_warnings,
args.quiet,
runtimeContext.debug,
args.enable_color,
@@ -1051,10 +1067,8 @@ def main(
prov_log_stream: Optional[Union[io.TextIOWrapper, WritableBagFile]] = None
if args.provenance:
- if argsl is None:
- raise Exception("argsl cannot be None")
try:
- prov_log_stream, prov_log_handler = setup_provenance(args, argsl, runtimeContext)
+ prov_log_stream, prov_log_handler = setup_provenance(args, runtimeContext, argsl)
except ArgumentException:
return 1
@@ -1137,7 +1151,7 @@ def main(
make_template(tool, stdout)
return 0
- if args.validate:
+ if len(args.job_order) == 0 and args.validate:
print(f"{args.workflow} is valid CWL.", file=stdout)
return 0
@@ -1313,10 +1327,14 @@ def main(
use_biocontainers=args.beta_use_biocontainers,
container_image_cache_path=args.beta_dependencies_directory,
)
+ runtimeContext.validate_only = args.validate
+ runtimeContext.validate_stdout = stdout
(out, status) = real_executor(
tool, initialized_job_order_object, runtimeContext, logger=_logger
)
+ if runtimeContext.validate_only is True:
+ return 0
if out is not None:
if runtimeContext.research_obj is not None:
diff --git a/cwltool/mpi.py b/cwltool/mpi.py
index b35b72ee5..2cc1122c6 100644
--- a/cwltool/mpi.py
+++ b/cwltool/mpi.py
@@ -1,4 +1,5 @@
"""Experimental support for MPI."""
+
import inspect
import os
import re
diff --git a/cwltool/pathmapper.py b/cwltool/pathmapper.py
index 660b5ddb1..0a06eb47b 100644
--- a/cwltool/pathmapper.py
+++ b/cwltool/pathmapper.py
@@ -4,8 +4,17 @@
import stat
import urllib
import uuid
-from pathlib import Path
-from typing import Dict, Iterator, List, Optional, Tuple, cast
+from typing import (
+ Dict,
+ ItemsView,
+ Iterable,
+ Iterator,
+ KeysView,
+ List,
+ Optional,
+ Tuple,
+ cast,
+)
from mypy_extensions import mypyc_attr
from schema_salad.exceptions import ValidationException
@@ -210,21 +219,29 @@ def mapper(self, src: str) -> MapperEnt:
return MapperEnt(p.resolved, p.target + src[i:], p.type, p.staged)
return self._pathmap[src]
- def files(self) -> List[str]:
- return list(self._pathmap.keys())
+ def files(self) -> KeysView[str]:
+ """Return a dictionary keys view of locations."""
+ return self._pathmap.keys()
- def items(self) -> List[Tuple[str, MapperEnt]]:
- return list(self._pathmap.items())
+ def items(self) -> ItemsView[str, MapperEnt]:
+ """Return a dictionary items view."""
+ return self._pathmap.items()
- def items_exclude_children(self) -> List[Tuple[str, MapperEnt]]:
+ def items_exclude_children(self) -> ItemsView[str, MapperEnt]:
+ """Return a dictionary items view minus any entries which are children of other entries."""
newitems = {}
- keys = [key for key, entry in self.items()]
+
+ def parents(path: str) -> Iterable[str]:
+ result = path
+ while len(result) > 1:
+ result = os.path.dirname(result)
+ yield result
+
for key, entry in self.items():
- parents = Path(key).parents
- if any([Path(key_) in parents for key_ in keys]):
+ if not self.files().isdisjoint(parents(key)):
continue
newitems[key] = entry
- return list(newitems.items())
+ return newitems.items()
def reversemap(
self,
diff --git a/cwltool/process.py b/cwltool/process.py
index f0c44fe17..42d90a395 100644
--- a/cwltool/process.py
+++ b/cwltool/process.py
@@ -1,4 +1,5 @@
"""Classes and methods relevant for all CWL Process types."""
+
import abc
import copy
import functools
@@ -33,7 +34,6 @@
)
from cwl_utils import expression
-from importlib_resources import files
from mypy_extensions import mypyc_attr
from rdflib import Graph
from ruamel.yaml.comments import CommentedMap, CommentedSeq
@@ -61,7 +61,6 @@
from .update import INTERNAL_VERSION, ORDERED_VERSIONS, ORIGINAL_CWLVERSION
from .utils import (
CWLObjectType,
- CWLOutputAtomType,
CWLOutputType,
HasReqsHints,
JobsGeneratorType,
@@ -71,6 +70,7 @@
aslist,
cmp_like_py2,
ensure_writable,
+ files,
get_listing,
normalizeFilesDirs,
random_outdir,
@@ -121,6 +121,7 @@ def filter(self, record: logging.LogRecord) -> bool:
"http://commonwl.org/cwltool#LoadListingRequirement",
"http://commonwl.org/cwltool#InplaceUpdateRequirement",
"http://commonwl.org/cwltool#CUDARequirement",
+ "http://commonwl.org/cwltool#ShmSize",
]
cwl_files = (
@@ -243,7 +244,7 @@ def stage_files(
"""
items = pathmapper.items() if not symlink else pathmapper.items_exclude_children()
targets: Dict[str, MapperEnt] = {}
- for key, entry in items:
+ for key, entry in list(items):
if "File" not in entry.type:
continue
if entry.target not in targets:
@@ -266,7 +267,7 @@ def stage_files(
)
# refresh the items, since we may have updated the pathmapper due to file name clashes
items = pathmapper.items() if not symlink else pathmapper.items_exclude_children()
- for key, entry in items:
+ for key, entry in list(items):
if not entry.staged:
continue
if not os.path.exists(os.path.dirname(entry.target)):
@@ -980,7 +981,8 @@ def evalResources(
):
if rsc is None:
continue
- mn = mx = None # type: Optional[Union[int, float]]
+ mn: Optional[Union[int, float]] = None
+ mx: Optional[Union[int, float]] = None
if rsc.get(a + "Min"):
with SourceLine(rsc, f"{a}Min", WorkflowException, runtimeContext.debug):
mn = cast(
@@ -1145,7 +1147,7 @@ def mergedirs(
for e in ents.values():
if e["class"] == "Directory" and "listing" in e:
e["listing"] = cast(
- MutableSequence[CWLOutputAtomType],
+ MutableSequence[CWLOutputType],
mergedirs(cast(List[CWLObjectType], e["listing"])),
)
r.extend(ents.values())
@@ -1205,7 +1207,7 @@ def scandeps(
deps["listing"] = doc["listing"]
if doc["class"] == "File" and "secondaryFiles" in doc:
deps["secondaryFiles"] = cast(
- CWLOutputAtomType,
+ CWLOutputType,
scandeps(
base,
cast(
@@ -1289,7 +1291,7 @@ def scandeps(
)
if sf:
deps2["secondaryFiles"] = cast(
- MutableSequence[CWLOutputAtomType], mergedirs(sf)
+ MutableSequence[CWLOutputType], mergedirs(sf)
)
if nestdirs:
deps2 = nestdir(base, deps2)
@@ -1342,10 +1344,15 @@ def compute_checksums(fs_access: StdFsAccess, fileobj: CWLObjectType) -> None:
if "checksum" not in fileobj:
checksum = hashlib.sha1() # nosec
location = cast(str, fileobj["location"])
- with fs_access.open(location, "rb") as f:
- contents = f.read(1024 * 1024)
- while contents != b"":
- checksum.update(contents)
+ if "contents" in fileobj:
+ contents = cast(str, fileobj["contents"]).encode("utf-8")
+ checksum.update(contents)
+ fileobj["size"] = len(contents)
+ else:
+ with fs_access.open(location, "rb") as f:
contents = f.read(1024 * 1024)
+ while contents != b"":
+ checksum.update(contents)
+ contents = f.read(1024 * 1024)
+ fileobj["size"] = fs_access.size(location)
fileobj["checksum"] = "sha1$%s" % checksum.hexdigest()
- fileobj["size"] = fs_access.size(location)
diff --git a/cwltool/run_job.py b/cwltool/run_job.py
index a8fe32496..307872f7a 100644
--- a/cwltool/run_job.py
+++ b/cwltool/run_job.py
@@ -1,4 +1,5 @@
"""Only used when there is a job script or CWLTOOL_FORCE_SHELL_POPEN=1."""
+
import json
import os
import subprocess # nosec
diff --git a/cwltool/secrets.py b/cwltool/secrets.py
index b3f16a3a9..f35f24c37 100644
--- a/cwltool/secrets.py
+++ b/cwltool/secrets.py
@@ -1,4 +1,5 @@
"""Minimal in memory storage of secrets."""
+
import uuid
from typing import Dict, List, MutableMapping, MutableSequence, Optional, cast
diff --git a/cwltool/singularity.py b/cwltool/singularity.py
index f78f912c9..c43183ac7 100644
--- a/cwltool/singularity.py
+++ b/cwltool/singularity.py
@@ -10,9 +10,13 @@
from typing import Callable, Dict, List, MutableMapping, Optional, Tuple, cast
from schema_salad.sourceline import SourceLine
+from spython.main import Client
+from spython.main.parse.parsers.docker import DockerParser
+from spython.main.parse.writers.singularity import SingularityWriter
from .builder import Builder
from .context import RuntimeContext
+from .docker import DockerCommandLineJob
from .errors import WorkflowException
from .job import ContainerCommandLineJob
from .loghandler import _logger
@@ -43,9 +47,7 @@ def get_version() -> Tuple[List[int], str]:
global _SINGULARITY_VERSION # pylint: disable=global-statement
global _SINGULARITY_FLAVOR # pylint: disable=global-statement
if _SINGULARITY_VERSION is None:
- version_output = check_output( # nosec
- ["singularity", "--version"], universal_newlines=True
- ).strip()
+ version_output = check_output(["singularity", "--version"], text=True).strip() # nosec
version_match = re.match(r"(.+) version ([0-9\.]+)", version_output)
if version_match is None:
@@ -108,6 +110,14 @@ def is_version_3_4_or_newer() -> bool:
return v[0][0] >= 4 or (v[0][0] == 3 and v[0][1] >= 4)
+def is_version_3_9_or_newer() -> bool:
+ """Detect if Singularity v3.9+ is available."""
+ if is_apptainer_1_or_newer():
+ return True # this is equivalent to singularity-ce > 3.9.5
+ v = get_version()
+ return v[0][0] >= 4 or (v[0][0] == 3 and v[0][1] >= 9)
+
+
def _normalize_image_id(string: str) -> str:
return string.replace("/", "_") + ".img"
@@ -133,6 +143,7 @@ def __init__(
def get_image(
dockerRequirement: Dict[str, str],
pull_image: bool,
+ tmp_outdir_prefix: str,
force_pull: bool = False,
) -> bool:
"""
@@ -155,7 +166,50 @@ def get_image(
elif is_version_2_6() and "SINGULARITY_PULLFOLDER" in os.environ:
cache_folder = os.environ["SINGULARITY_PULLFOLDER"]
- if "dockerImageId" not in dockerRequirement and "dockerPull" in dockerRequirement:
+ if "dockerFile" in dockerRequirement:
+ if cache_folder is None: # if environment variables were not set
+ cache_folder = create_tmp_dir(tmp_outdir_prefix)
+
+ absolute_path = os.path.abspath(cache_folder)
+ if "dockerImageId" in dockerRequirement:
+ image_name = dockerRequirement["dockerImageId"]
+ image_path = os.path.join(absolute_path, image_name)
+ if os.path.exists(image_path):
+ found = True
+ if found is False:
+ dockerfile_path = os.path.join(absolute_path, "Dockerfile")
+ singularityfile_path = dockerfile_path + ".def"
+ # if you do not set APPTAINER_TMPDIR, the build will crash with:
+ # WARNING: 'nodev' mount option set on /tmp, it could be a
+ # source of failure during build process
+ # FATAL: Unable to create build: 'noexec' mount option set on
+ # /tmp, temporary root filesystem won't be usable at this location
+ with open(dockerfile_path, "w") as dfile:
+ dfile.write(dockerRequirement["dockerFile"])
+
+ singularityfile = SingularityWriter(DockerParser(dockerfile_path).parse()).convert()
+ with open(singularityfile_path, "w") as file:
+ file.write(singularityfile)
+
+ os.environ["APPTAINER_TMPDIR"] = absolute_path
+ singularity_options = ["--fakeroot"] if not shutil.which("proot") else []
+ if "dockerImageId" in dockerRequirement:
+ Client.build(
+ recipe=singularityfile_path,
+ build_folder=absolute_path,
+ image=dockerRequirement["dockerImageId"],
+ sudo=False,
+ options=singularity_options,
+ )
+ else:
+ Client.build(
+ recipe=singularityfile_path,
+ build_folder=absolute_path,
+ sudo=False,
+ options=singularity_options,
+ )
+ found = True
+ elif "dockerImageId" not in dockerRequirement and "dockerPull" in dockerRequirement:
match = re.search(pattern=r"([a-z]*://)", string=dockerRequirement["dockerPull"])
img_name = _normalize_image_id(dockerRequirement["dockerPull"])
candidates.append(img_name)
@@ -236,13 +290,6 @@ def get_image(
check_call(cmd, stdout=sys.stderr) # nosec
found = True
- elif "dockerFile" in dockerRequirement:
- raise SourceLine(
- dockerRequirement, "dockerFile", WorkflowException, debug
- ).makeError(
- "dockerFile is not currently supported when using the "
- "Singularity runtime for Docker containers."
- )
elif "dockerLoad" in dockerRequirement:
if is_version_3_1_or_newer():
if "dockerImageId" in dockerRequirement:
@@ -291,21 +338,30 @@ def get_from_requirements(
if not bool(shutil.which("singularity")):
raise WorkflowException("singularity executable is not available")
- if not self.get_image(cast(Dict[str, str], r), pull_image, force_pull):
+ if not self.get_image(cast(Dict[str, str], r), pull_image, tmp_outdir_prefix, force_pull):
raise WorkflowException("Container image {} not found".format(r["dockerImageId"]))
- return os.path.abspath(cast(str, r["dockerImageId"]))
+ if "CWL_SINGULARITY_CACHE" in os.environ:
+ cache_folder = os.environ["CWL_SINGULARITY_CACHE"]
+ img_path = os.path.join(cache_folder, cast(str, r["dockerImageId"]))
+ else:
+ img_path = cast(str, r["dockerImageId"])
+
+ return os.path.abspath(img_path)
@staticmethod
def append_volume(runtime: List[str], source: str, target: str, writable: bool = False) -> None:
"""Add binding arguments to the runtime list."""
- runtime.append("--bind")
- # Mounts are writable by default, so 'rw' is optional and not
- # supported (due to a bug) in some 3.6 series releases.
- vol = f"{source}:{target}"
- if not writable:
- vol += ":ro"
- runtime.append(vol)
+ if is_version_3_9_or_newer():
+ DockerCommandLineJob.append_volume(runtime, source, target, writable, skip_mkdirs=True)
+ else:
+ runtime.append("--bind")
+ # Mounts are writable by default, so 'rw' is optional and not
+ # supported (due to a bug) in some 3.6 series releases.
+ vol = f"{source}:{target}"
+ if not writable:
+ vol += ":ro"
+ runtime.append(vol)
def add_file_or_directory_volume(
self, runtime: List[str], volume: MapperEnt, host_outdir_tgt: Optional[str]
@@ -334,7 +390,7 @@ def add_writable_file_volume(
if self.inplace_update:
try:
os.link(os.path.realpath(volume.resolved), host_outdir_tgt)
- except os.error:
+ except OSError:
shutil.copy(volume.resolved, host_outdir_tgt)
else:
shutil.copy(volume.resolved, host_outdir_tgt)
diff --git a/cwltool/software_requirements.py b/cwltool/software_requirements.py
index 7e114bb86..ec99bda05 100644
--- a/cwltool/software_requirements.py
+++ b/cwltool/software_requirements.py
@@ -12,6 +12,7 @@
import string
from typing import (
TYPE_CHECKING,
+ Any,
Dict,
List,
MutableMapping,
@@ -75,6 +76,8 @@ def __init__(self, args: argparse.Namespace) -> None:
self.dependency_resolvers_config_file = None
else:
self.use_tool_dependencies = False
+ if self.tool_dependency_dir and not os.path.exists(self.tool_dependency_dir):
+ os.makedirs(self.tool_dependency_dir)
def build_job_script(self, builder: "Builder", command: List[str]) -> str:
ensure_galaxy_lib_available()
@@ -85,15 +88,15 @@ def build_job_script(self, builder: "Builder", command: List[str]) -> str:
app_config = {
"conda_auto_install": True,
"conda_auto_init": True,
+ "debug": builder.debug,
}
tool_dependency_manager: "deps.DependencyManager" = deps.build_dependency_manager(
app_config_dict=app_config,
resolution_config_dict=resolution_config_dict,
conf_file=self.dependency_resolvers_config_file,
)
- dependencies = get_dependencies(builder)
- handle_dependencies = "" # str
- if dependencies:
+ handle_dependencies: str = ""
+ if dependencies := get_dependencies(builder):
handle_dependencies = "\n".join(
tool_dependency_manager.dependency_shell_commands(
dependencies, job_directory=builder.tmpdir
@@ -107,7 +110,7 @@ def build_job_script(self, builder: "Builder", command: List[str]) -> str:
def get_dependencies(builder: HasReqsHints) -> ToolRequirements:
(software_requirement, _) = builder.get_requirement("SoftwareRequirement")
- dependencies: List["ToolRequirement"] = []
+ dependencies: List[Union["ToolRequirement", Dict[str, Any]]] = []
if software_requirement and software_requirement.get("packages"):
packages = cast(
MutableSequence[MutableMapping[str, Union[str, MutableSequence[str]]]],
@@ -156,7 +159,7 @@ def get_container_from_software_requirements(
[DOCKER_CONTAINER_TYPE], tool_info
)
if container_description:
- return cast(Optional[str], container_description.identifier)
+ return str(container_description.identifier)
return None
diff --git a/cwltool/task_queue.py b/cwltool/task_queue.py
index eed705269..59b1609e9 100644
--- a/cwltool/task_queue.py
+++ b/cwltool/task_queue.py
@@ -55,8 +55,8 @@ def _task_queue_func(self) -> None:
return
try:
task()
- except BaseException as e:
- _logger.exception("Unhandled exception running task")
+ except BaseException as e: # noqa: B036
+ _logger.exception("Unhandled exception running task", exc_info=e)
self.error = e
finally:
with self.lock:
diff --git a/cwltool/udocker.py b/cwltool/udocker.py
index 6ab54ff40..6598d6a7c 100644
--- a/cwltool/udocker.py
+++ b/cwltool/udocker.py
@@ -9,6 +9,12 @@ class UDockerCommandLineJob(DockerCommandLineJob):
"""Runs a CommandLineJob in a software container using the udocker engine."""
@staticmethod
- def append_volume(runtime: List[str], source: str, target: str, writable: bool = False) -> None:
+ def append_volume(
+ runtime: List[str],
+ source: str,
+ target: str,
+ writable: bool = False,
+ skip_mkdirs: bool = False,
+ ) -> None:
"""Add binding arguments to the runtime list."""
runtime.append("--volume={}:{}:{}".format(source, target, "rw" if writable else "ro"))
diff --git a/cwltool/utils.py b/cwltool/utils.py
index 455231847..219f91830 100644
--- a/cwltool/utils.py
+++ b/cwltool/utils.py
@@ -1,5 +1,14 @@
"""Shared functions and other definitions."""
+
import collections
+
+try:
+ import fcntl
+except ImportError:
+ # Guard against `from .utils import ...` on Windows.
+ # See windows_check() in main.py
+ pass
+import importlib.metadata
import os
import random
import shutil
@@ -16,22 +25,24 @@
from itertools import zip_longest
from pathlib import Path, PurePosixPath
from tempfile import NamedTemporaryFile
-from types import ModuleType
from typing import (
IO,
TYPE_CHECKING,
Any,
Callable,
+ Deque,
Dict,
Generator,
Iterable,
List,
+ Literal,
MutableMapping,
MutableSequence,
NamedTuple,
Optional,
Set,
Tuple,
+ TypedDict,
Union,
cast,
)
@@ -39,15 +50,14 @@
import requests
from cachecontrol import CacheControl
from cachecontrol.caches import FileCache
-from mypy_extensions import TypedDict, mypyc_attr
+from mypy_extensions import mypyc_attr
from schema_salad.exceptions import ValidationException
from schema_salad.ref_resolver import Loader
-from typing_extensions import Deque, Literal
-if sys.version_info >= (3, 8):
- import importlib.metadata as importlib_metadata
+if sys.version_info >= (3, 9):
+ from importlib.resources import as_file, files
else:
- import importlib_metadata
+ from importlib_resources import as_file, files
if TYPE_CHECKING:
from .command_line_tool import CallbackJob, ExpressionJob
@@ -55,6 +65,8 @@
from .stdfsaccess import StdFsAccess
from .workflow_job import WorkflowJob
+__all__ = ["files", "as_file"]
+
__random_outdir: Optional[str] = None
CONTENT_LIMIT = 64 * 1024
@@ -63,27 +75,14 @@
processes_to_kill: Deque["subprocess.Popen[str]"] = collections.deque()
-CWLOutputAtomType = Union[
- None,
- bool,
- str,
- int,
- float,
- MutableSequence[
- Union[None, bool, str, int, float, MutableSequence[Any], MutableMapping[str, Any]]
- ],
- MutableMapping[
- str,
- Union[None, bool, str, int, float, MutableSequence[Any], MutableMapping[str, Any]],
- ],
-]
CWLOutputType = Union[
+ None,
bool,
str,
int,
float,
- MutableSequence[CWLOutputAtomType],
- MutableMapping[str, CWLOutputAtomType],
+ MutableSequence["CWLOutputType"],
+ MutableMapping[str, "CWLOutputType"],
]
CWLObjectType = MutableMapping[str, Optional[CWLOutputType]]
"""Typical raw dictionary found in lightly parsed CWL."""
@@ -99,16 +98,16 @@
DirectoryType = TypedDict(
"DirectoryType", {"class": str, "listing": List[CWLObjectType], "basename": str}
)
-JSONAtomType = Union[Dict[str, Any], List[Any], str, int, float, bool, None]
-JSONType = Union[Dict[str, JSONAtomType], List[JSONAtomType], str, int, float, bool, None]
-WorkflowStateItem = NamedTuple(
- "WorkflowStateItem",
- [
- ("parameter", CWLObjectType),
- ("value", Optional[CWLOutputType]),
- ("success", str),
- ],
-)
+JSONType = Union[Dict[str, "JSONType"], List["JSONType"], str, int, float, bool, None]
+
+
+class WorkflowStateItem(NamedTuple):
+ """Workflow state item."""
+
+ parameter: CWLObjectType
+ value: Optional[CWLOutputType]
+ success: str
+
ParametersType = List[CWLObjectType]
StepType = CWLObjectType # WorkflowStep
@@ -118,8 +117,7 @@
def versionstring() -> str:
"""Version of CWLtool used to execute the workflow."""
- pkg = importlib_metadata.version("cwltool")
- if pkg:
+ if pkg := importlib.metadata.version("cwltool"):
return f"{sys.argv[0]} {pkg}"
return "{} {}".format(sys.argv[0], "unknown version")
@@ -243,29 +241,12 @@ def random_outdir() -> str:
return __random_outdir
-#
-# Simple multi-platform (fcntl/msvrt) file locking wrapper
-#
-fcntl: Optional[ModuleType] = None
-msvcrt: Optional[ModuleType] = None
-try:
- import fcntl
-except ImportError:
- import msvcrt
-
-
def shared_file_lock(fd: IO[Any]) -> None:
- if fcntl:
- fcntl.flock(fd.fileno(), fcntl.LOCK_SH)
- elif msvcrt:
- msvcrt.locking(fd.fileno(), msvcrt.LK_LOCK, 1024)
+ fcntl.flock(fd.fileno(), fcntl.LOCK_SH)
def upgrade_lock(fd: IO[Any]) -> None:
- if fcntl:
- fcntl.flock(fd.fileno(), fcntl.LOCK_EX)
- elif msvcrt:
- pass
+ fcntl.flock(fd.fileno(), fcntl.LOCK_EX)
def adjustFileObjs(rec: Any, op: Union[Callable[..., Any], "partial[Any]"]) -> None:
@@ -310,7 +291,7 @@ def get_listing(fs_access: "StdFsAccess", rec: CWLObjectType, recursive: bool =
return
if "listing" in rec:
return
- listing: List[CWLOutputAtomType] = []
+ listing: List[CWLOutputType] = []
loc = cast(str, rec["location"])
for ld in fs_access.listdir(loc):
parse = urllib.parse.urlparse(ld)
@@ -395,8 +376,8 @@ def add_writable_flag(p: str) -> None:
if os.path.isdir(path):
if include_root:
add_writable_flag(path)
- for root, dirs, files in os.walk(path):
- for name in files:
+ for root, dirs, files_ in os.walk(path):
+ for name in files_:
add_writable_flag(os.path.join(root, name))
for name in dirs:
add_writable_flag(os.path.join(root, name))
@@ -407,8 +388,8 @@ def add_writable_flag(p: str) -> None:
def ensure_non_writable(path: str) -> None:
"""Attempt to change permissions to ensure that a path is not writable."""
if os.path.isdir(path):
- for root, dirs, files in os.walk(path):
- for name in files:
+ for root, dirs, files_ in os.walk(path):
+ for name in files_:
j = os.path.join(root, name)
st = os.stat(j)
mode = stat.S_IMODE(st.st_mode)
diff --git a/cwltool/validate_js.py b/cwltool/validate_js.py
index 27a7ace66..de4adaa14 100644
--- a/cwltool/validate_js.py
+++ b/cwltool/validate_js.py
@@ -18,7 +18,6 @@
from cwl_utils.errors import SubstitutionError
from cwl_utils.expression import scanner as scan_expression
from cwl_utils.sandboxjs import code_fragment_to_js, exec_js_process
-from importlib_resources import files
from ruamel.yaml.comments import CommentedMap, CommentedSeq
from schema_salad.avro.schema import (
ArraySchema,
@@ -33,6 +32,7 @@
from .errors import WorkflowException
from .loghandler import _logger
+from .utils import files
def is_expression(tool: Any, schema: Optional[Schema]) -> bool:
@@ -87,20 +87,12 @@ def get_expressions(
if not isinstance(tool, MutableSequence):
return []
- def tmp_expr(
- x: Tuple[int, Union[CommentedMap, str, CommentedSeq]]
- ) -> List[Tuple[str, Optional[SourceLine]]]:
- # using a lambda for this broke mypyc v0.910 and before
- return get_expressions(
- x[1],
- cast(ArraySchema, schema).items,
- SourceLine(tool, x[0], include_traceback=debug),
- )
-
return list(
itertools.chain(
*map(
- tmp_expr,
+ lambda x: get_expressions(
+ x[1], getattr(schema, "items"), SourceLine(tool, x[0]) # noqa: B009
+ ),
enumerate(tool),
)
)
diff --git a/cwltool/workflow.py b/cwltool/workflow.py
index 3f61cd521..8546ca72e 100644
--- a/cwltool/workflow.py
+++ b/cwltool/workflow.py
@@ -17,6 +17,7 @@
)
from uuid import UUID
+from mypy_extensions import mypyc_attr
from ruamel.yaml.comments import CommentedMap
from schema_salad.exceptions import ValidationException
from schema_salad.sourceline import SourceLine, indent
@@ -66,6 +67,7 @@ def default_make_tool(toolpath_object: CommentedMap, loadingContext: LoadingCont
context.default_make_tool = default_make_tool
+@mypyc_attr(serializable=True)
class Workflow(Process):
def __init__(
self,
diff --git a/docs/conf.py b/docs/conf.py
index 94c3e1e64..6e04b5d64 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -6,21 +6,24 @@
# -- Path setup --------------------------------------------------------------
+import importlib.metadata
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
-from datetime import datetime
import time
+from datetime import datetime, timezone
sys.path.insert(0, os.path.abspath(".."))
# -- Project information -----------------------------------------------------
-build_date = datetime.utcfromtimestamp(int(os.environ.get("SOURCE_DATE_EPOCH", time.time())))
+build_date = datetime.fromtimestamp(
+ int(os.environ.get("SOURCE_DATE_EPOCH", time.time())), timezone.utc
+)
project = "Common Workflow Language reference implementation"
copyright = f"2019 — {build_date.year}, Peter Amstutz and contributors to the CWL Project"
author = "Peter Amstutz and Common Workflow Language Project contributors"
@@ -81,12 +84,7 @@
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ["_static"]
-
-if sys.version_info >= (3, 8):
- import importlib.metadata as importlib_metadata
-else:
- import importlib_metadata
-release = importlib_metadata.version("cwltool")
+release = importlib.metadata.version("cwltool")
version = ".".join(release.split(".")[:2])
autoapi_dirs = ["../cwltool"]
diff --git a/docs/pythonversions.rst b/docs/pythonversions.rst
index b3a34e36f..74b1a1b03 100644
--- a/docs/pythonversions.rst
+++ b/docs/pythonversions.rst
@@ -2,16 +2,18 @@
Python version support policy
=============================
-Cwltool will always support `stable Python 3 releases with active branches
-`_.
+`cwltool` will always support `Python 3 versions that are officially supported by the Python Software Foundation
+`_.
-For versions that are no longer supported by Python upstream, cwltool
-support also extends to the default Python version included in the
+For versions that are no longer supported by the Python Software Foundation (or "upstream" for short), cwltool
+support also extends to the latest Python versions included in the
following major Linux distributions:
-* Debian (`stable `_, `oldstable `_)
+* Debian (`stable `_)
* Ubuntu (`LTS release standard support `_)
-* Centos 7 (`while in maintenance `_)
+
+This means that users may need to install a newer version of Python
+from their Linux distributor if the default version is too old.
If there is a conflict between a third party package dependency which
has dropped support for a Python version that cwltool should support
@@ -22,30 +24,34 @@ and downstream users before making the decision to drop support for a
Python version before the date outlined in this policy. The reasoning
for dropping support for a Python version should be outlined here.
-As of February 2022, here are approximate cwltool support periods for
-across Python versions:
+As of 2023-08-14, here are approximate cwltool support periods for Python versions (`EOL` == "End of Life", the end of the support period by that provider):
====== ======================
Python cwltool end of support
====== ======================
-2.7 ended January 2020
-3.5 ended October 2020
-3.6 June 2024 (Centos 7 EOL)
-3.7 June 2023 (upstream EOL)
-3.8 April 2025 (Ubuntu 20.04 EOL)
-3.9 October 2025 (upstream EOL)
-3.10 October 2026 (upstream EOL)
+2.7 ended 2020-01 (upstream EOL)
+3.5 ended 2020-10 (upstream EOL)
+3.6 ended 2023-08-31 (change in cwltool policy)
+3.7 ended 2023-07-27 (upstream EOL)
+3.8 2024-10-14 (upstream EOL)
+3.9 2025-10-01 (upstream EOL)
+3.10 2027-04-01 (Ubuntu 22.04 LTS EOL)
+3.11 2027-10-01 (upstream EOL)
+3.12 2028-10-01 (planned upstream EOL)
+3.13 2029-10-01 (planned upstream EOL)
====== ======================
-Default Python version of supported Linux distributions, for reference
-(as of February 2022)
-
-====== =============================================
-Python Linux distros where it is the default version
-====== =============================================
-3.6 Ubuntu 18.04, Centos 7
-3.7 Debian 10
-3.8 Ubuntu 20.04
-3.9 Debian 11
-3.10 None
-====== =============================================
+Python versions of supported Linux distributions, for reference
+(as of August 2023)
+
+============== =============================================
+Python Version Linux distros where it is a supported version
+============== =============================================
+3.6 Ubuntu 18.04 LTS
+3.7 Debian 10
+3.8 Ubuntu 20.04 LTS
+3.9 Debian 11, Ubuntu 20.04 LTS
+3.10 Ubuntu 22.04 LTS
+3.11 Debian 12
+3.12 Debian 13 (planned)
+============== =============================================
diff --git a/docs/requirements.txt b/docs/requirements.txt
index 02247c681..40a2eefbf 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -1,7 +1,6 @@
sphinx >= 2.2
-sphinx-rtd-theme==1.2.1
+sphinx-rtd-theme==2.0.0
sphinx-autoapi
sphinx-autodoc-typehints
-typed_ast;python_version<'3.8'
sphinxcontrib-autoprogram
-importlib_resources
+importlib_resources;python_version<'3.9'
diff --git a/gittaggers.py b/gittaggers.py
deleted file mode 100644
index ab54b3e05..000000000
--- a/gittaggers.py
+++ /dev/null
@@ -1,45 +0,0 @@
-import subprocess
-import sys
-import time
-
-if sys.version_info >= (3, 8):
- import importlib.metadata as importlib_metadata
-else:
- import importlib_metadata
-
-from typing import Any
-
-from setuptools.command.egg_info import egg_info
-
-SETUPTOOLS_VER = importlib_metadata.version("setuptools").split(".")
-
-RECENT_SETUPTOOLS = (
- int(SETUPTOOLS_VER[0]) > 40
- or (int(SETUPTOOLS_VER[0]) == 40 and int(SETUPTOOLS_VER[1]) > 0)
- or (int(SETUPTOOLS_VER[0]) == 40 and int(SETUPTOOLS_VER[1]) == 0 and int(SETUPTOOLS_VER[2]) > 0)
-)
-
-
-class EggInfoFromGit(egg_info):
- """Tag the build with git commit timestamp.
-
- If a build tag has already been set (e.g., "egg_info -b", building
- from source package), leave it alone.
- """
-
- def git_timestamp_tag(self) -> str:
- gitinfo = subprocess.check_output(
- ["git", "log", "--first-parent", "--max-count=1", "--format=format:%ct", "."]
- ).strip()
- return time.strftime(".%Y%m%d%H%M%S", time.gmtime(int(gitinfo)))
-
- def tags(self) -> Any:
- if self.tag_build is None:
- try:
- self.tag_build = self.git_timestamp_tag()
- except subprocess.CalledProcessError:
- pass
- return egg_info.tags(self) # type: ignore[no-untyped-call]
-
- if RECENT_SETUPTOOLS:
- vtags = property(tags)
diff --git a/lint-requirements.txt b/lint-requirements.txt
index 0b8dc8bff..9a13c3b07 100644
--- a/lint-requirements.txt
+++ b/lint-requirements.txt
@@ -1,3 +1,3 @@
-flake8-bugbear < 23.6
-black ~= 23.3
+flake8-bugbear<24.3
+black~=24.2
codespell
diff --git a/mypy-requirements.txt b/mypy-requirements.txt
index 14c64f3b0..d89b7d1b8 100644
--- a/mypy-requirements.txt
+++ b/mypy-requirements.txt
@@ -1,8 +1,9 @@
-mypy==1.3.0 # also update pyproject.toml
-ruamel.yaml>=0.16.0,<0.18
-schema-salad>=8.4,<9
-cwl-utils >=0.22
+mypy==1.9.0 # also update pyproject.toml
+ruamel.yaml>=0.16.0,<0.19
+cwl-utils>=0.32
types-requests
types-setuptools
types-psutil
types-mock
+galaxy-tool-util>=22.1.2,!=23.0.1,!=23.0.2,!=23.0.3,!=23.0.4,!=23.0.5,<23.3
+galaxy-util<23.2
diff --git a/mypy-stubs/argcomplete/__init__.pyi b/mypy-stubs/argcomplete/__init__.pyi
deleted file mode 100644
index f9204a07b..000000000
--- a/mypy-stubs/argcomplete/__init__.pyi
+++ /dev/null
@@ -1,18 +0,0 @@
-import argparse
-from typing import Any, Optional
-
-class CompletionFinder:
- def __call__(
- self,
- argument_parser: argparse.ArgumentParser,
- always_complete_options: bool = ...,
- exit_method: Any = ...,
- output_stream: Optional[Any] = ...,
- exclude: Optional[Any] = ...,
- validator: Optional[Any] = ...,
- print_suppressed: bool = ...,
- append_space: Optional[Any] = ...,
- default_completer: Any = ...,
- ) -> None: ...
-
-autocomplete: CompletionFinder
diff --git a/mypy-stubs/cachecontrol/__init__.pyi b/mypy-stubs/cachecontrol/__init__.pyi
deleted file mode 100644
index d8c9745da..000000000
--- a/mypy-stubs/cachecontrol/__init__.pyi
+++ /dev/null
@@ -1,9 +0,0 @@
-# Stubs for cachecontrol (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-
-from .wrapper import CacheControl as CacheControl
-
-__email__ = ... # type: Any
diff --git a/mypy-stubs/cachecontrol/cache.pyi b/mypy-stubs/cachecontrol/cache.pyi
deleted file mode 100644
index 04b98f2ea..000000000
--- a/mypy-stubs/cachecontrol/cache.pyi
+++ /dev/null
@@ -1,5 +0,0 @@
-class BaseCache:
- def get(self, key: str) -> str | None: ...
- def set(self, key: str, value: str) -> None: ...
- def delete(self, key: str) -> None: ...
- def close(self) -> None: ...
diff --git a/mypy-stubs/cachecontrol/caches/__init__.pyi b/mypy-stubs/cachecontrol/caches/__init__.pyi
deleted file mode 100644
index 42c0ad685..000000000
--- a/mypy-stubs/cachecontrol/caches/__init__.pyi
+++ /dev/null
@@ -1,11 +0,0 @@
-# Stubs for cachecontrol.caches (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-
-from .file_cache import FileCache as FileCache
-
-# from .redis_cache import RedisCache as RedisCache
-
-notice = ... # type: Any
diff --git a/mypy-stubs/cachecontrol/caches/file_cache.pyi b/mypy-stubs/cachecontrol/caches/file_cache.pyi
deleted file mode 100644
index 229c143ba..000000000
--- a/mypy-stubs/cachecontrol/caches/file_cache.pyi
+++ /dev/null
@@ -1,31 +0,0 @@
-from os import PathLike
-from typing import ContextManager
-
-from ..cache import BaseCache as BaseCache
-from ..controller import CacheController as CacheController
-
-class _LockClass:
- path: str
-
-_lock_class = ContextManager[_LockClass]
-
-class FileCache(BaseCache):
- directory: str | PathLike[str]
- forever: bool
- filemode: str
- dirmode: str
- lock_class: _lock_class
- def __init__(
- self,
- directory: str | PathLike[str],
- forever: bool = ...,
- filemode: int = ...,
- dirmode: int = ...,
- use_dir_lock: bool | None = ...,
- lock_class: _lock_class | None = ...,
- ) -> None: ...
- @staticmethod
- def encode(x: str) -> str: ...
- def get(self, key: str) -> None | str: ...
- def set(self, key: str, value: str) -> None: ...
- def delete(self, key: str) -> None: ...
diff --git a/mypy-stubs/cachecontrol/compat.pyi b/mypy-stubs/cachecontrol/compat.pyi
deleted file mode 100644
index fabfae3f8..000000000
--- a/mypy-stubs/cachecontrol/compat.pyi
+++ /dev/null
@@ -1,7 +0,0 @@
-# Stubs for cachecontrol.compat (Python 2)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-
-str = ... # type: Any
diff --git a/mypy-stubs/cachecontrol/controller.pyi b/mypy-stubs/cachecontrol/controller.pyi
deleted file mode 100644
index 5118fab02..000000000
--- a/mypy-stubs/cachecontrol/controller.pyi
+++ /dev/null
@@ -1,17 +0,0 @@
-from _typeshed import Incomplete
-
-logger: Incomplete
-URI: Incomplete
-
-class CacheController:
- cache: Incomplete
- cache_etags: Incomplete
- serializer: Incomplete
- cacheable_status_codes: Incomplete
- def __init__(
- self,
- cache: Incomplete | None = ...,
- cache_etags: bool = ...,
- serializer: Incomplete | None = ...,
- status_codes: Incomplete | None = ...,
- ) -> None: ...
diff --git a/mypy-stubs/cachecontrol/wrapper.pyi b/mypy-stubs/cachecontrol/wrapper.pyi
deleted file mode 100644
index a4da67289..000000000
--- a/mypy-stubs/cachecontrol/wrapper.pyi
+++ /dev/null
@@ -1,13 +0,0 @@
-from _typeshed import Incomplete
-from requests import Session
-
-def CacheControl(
- sess: Session,
- cache: Incomplete | None = ...,
- cache_etags: bool = ...,
- serializer: Incomplete | None = ...,
- heuristic: Incomplete | None = ...,
- controller_class: Incomplete | None = ...,
- adapter_class: Incomplete | None = ...,
- cacheable_methods: Incomplete | None = ...,
-) -> Session: ...
diff --git a/mypy-stubs/graphviz/__init__.pyi b/mypy-stubs/graphviz/__init__.pyi
deleted file mode 100644
index 023952bbe..000000000
--- a/mypy-stubs/graphviz/__init__.pyi
+++ /dev/null
@@ -1,12 +0,0 @@
-# Stubs for graphviz (Python 3.5)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-# from .backend import ENGINES as ENGINES, ExecutableNotFound as ExecutableNotFound, FORMATS as FORMATS, pipe as pipe, render as render, version as version, view as view
-# from .dot import Digraph as Digraph, Graph as Graph
-# from .files import Source as Source
-# from .lang import nohtml as nohtml
-
-# ENGINES = ENGINES
-# FORMATS = FORMATS
-# ExecutableNotFound = ExecutableNotFound
diff --git a/mypy-stubs/graphviz/_compat.pyi b/mypy-stubs/graphviz/_compat.pyi
deleted file mode 100644
index 4308df0ad..000000000
--- a/mypy-stubs/graphviz/_compat.pyi
+++ /dev/null
@@ -1,15 +0,0 @@
-# Stubs for graphviz._compat (Python 3.5)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-
-PY2: Any
-string_classes: Any
-text_type = unicode
-
-def iteritems(d): ...
-def makedirs(name, mode: int = ..., exist_ok: bool = ...): ...
-def stderr_write_binary(data): ...
-
-text_type = str
diff --git a/mypy-stubs/graphviz/backend.pyi b/mypy-stubs/graphviz/backend.pyi
deleted file mode 100644
index 1582b9768..000000000
--- a/mypy-stubs/graphviz/backend.pyi
+++ /dev/null
@@ -1,11 +0,0 @@
-# Stubs for graphviz.backend (Python 3.5)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-class ExecutableNotFound(RuntimeError):
- def __init__(self, args) -> None: ...
-
-def render(engine, format, filepath, quiet: bool = ...): ...
-def pipe(engine, format, data, quiet: bool = ...): ...
-def version(): ...
-def view(filepath): ...
diff --git a/mypy-stubs/graphviz/dot.pyi b/mypy-stubs/graphviz/dot.pyi
deleted file mode 100644
index 45627097c..000000000
--- a/mypy-stubs/graphviz/dot.pyi
+++ /dev/null
@@ -1,76 +0,0 @@
-# Stubs for graphviz.dot (Python 3.5)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any, Optional
-
-from . import files
-
-class Dot(files.File):
- name: Any = ...
- comment: Any = ...
- graph_attr: Any = ...
- node_attr: Any = ...
- edge_attr: Any = ...
- body: Any = ...
- strict: Any = ...
- def __init__(
- self,
- name: Optional[Any] = ...,
- comment: Optional[Any] = ...,
- filename: Optional[Any] = ...,
- directory: Optional[Any] = ...,
- format: Optional[Any] = ...,
- engine: Optional[Any] = ...,
- encoding: Any = ...,
- graph_attr: Optional[Any] = ...,
- node_attr: Optional[Any] = ...,
- edge_attr: Optional[Any] = ...,
- body: Optional[Any] = ...,
- strict: bool = ...,
- ) -> None: ...
- def clear(self, keep_attrs: bool = ...): ...
- def __iter__(self, subgraph: bool = ...): ...
- source: Any = ...
- def node(
- self,
- name,
- label: Optional[Any] = ...,
- _attributes: Optional[Any] = ...,
- **attrs,
- ): ...
- def edge(
- self,
- tail_name,
- head_name,
- label: Optional[Any] = ...,
- _attributes: Optional[Any] = ...,
- **attrs,
- ): ...
- def edges(self, tail_head_iter): ...
- def attr(self, kw: Optional[Any] = ..., _attributes: Optional[Any] = ..., **attrs): ...
- def subgraph(
- self,
- graph: Optional[Any] = ...,
- name: Optional[Any] = ...,
- comment: Optional[Any] = ...,
- graph_attr: Optional[Any] = ...,
- node_attr: Optional[Any] = ...,
- edge_attr: Optional[Any] = ...,
- body: Optional[Any] = ...,
- ): ...
-
-class SubgraphContext:
- parent: Any = ...
- graph: Any = ...
- def __init__(self, parent, kwargs) -> None: ...
- def __enter__(self): ...
- def __exit__(self, type_, value, traceback): ...
-
-class Graph(Dot):
- @property
- def directed(self): ...
-
-class Digraph(Dot):
- @property
- def directed(self): ...
diff --git a/mypy-stubs/graphviz/files.pyi b/mypy-stubs/graphviz/files.pyi
deleted file mode 100644
index b0b8bdedc..000000000
--- a/mypy-stubs/graphviz/files.pyi
+++ /dev/null
@@ -1,73 +0,0 @@
-# Stubs for graphviz.files (Python 3.5)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any, Optional
-
-class Base:
- @property
- def format(self): ...
- @format.setter
- def format(self, format): ...
- @property
- def engine(self): ...
- @engine.setter
- def engine(self, engine): ...
- @property
- def encoding(self): ...
- @encoding.setter
- def encoding(self, encoding): ...
- def copy(self): ...
-
-class File(Base):
- directory: str = ...
- filename: Any = ...
- format: Any = ...
- engine: Any = ...
- encoding: Any = ...
- def __init__(
- self,
- filename: Optional[Any] = ...,
- directory: Optional[Any] = ...,
- format: Optional[Any] = ...,
- engine: Optional[Any] = ...,
- encoding: Any = ...,
- ) -> None: ...
- def pipe(self, format: Optional[Any] = ...): ...
- @property
- def filepath(self): ...
- def save(self, filename: Optional[Any] = ..., directory: Optional[Any] = ...): ...
- def render(
- self,
- filename: Optional[Any] = ...,
- directory: Optional[Any] = ...,
- view: bool = ...,
- cleanup: bool = ...,
- ): ...
- def view(
- self,
- filename: Optional[Any] = ...,
- directory: Optional[Any] = ...,
- cleanup: bool = ...,
- ): ...
-
-class Source(File):
- @classmethod
- def from_file(
- cls,
- filename,
- directory: Optional[Any] = ...,
- format: Optional[Any] = ...,
- engine: Optional[Any] = ...,
- encoding: Any = ...,
- ): ...
- source: Any = ...
- def __init__(
- self,
- source,
- filename: Optional[Any] = ...,
- directory: Optional[Any] = ...,
- format: Optional[Any] = ...,
- engine: Optional[Any] = ...,
- encoding: Any = ...,
- ) -> None: ...
diff --git a/mypy-stubs/graphviz/lang.pyi b/mypy-stubs/graphviz/lang.pyi
deleted file mode 100644
index 28f163ea9..000000000
--- a/mypy-stubs/graphviz/lang.pyi
+++ /dev/null
@@ -1,20 +0,0 @@
-# Stubs for graphviz.lang (Python 3.5)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any, Optional
-
-def quote(identifier, html: Any = ..., valid_id: Any = ..., dot_keywords: Any = ...): ...
-def quote_edge(identifier): ...
-def a_list(
- label: Optional[Any] = ...,
- kwargs: Optional[Any] = ...,
- attributes: Optional[Any] = ...,
-): ...
-def attr_list(
- label: Optional[Any] = ...,
- kwargs: Optional[Any] = ...,
- attributes: Optional[Any] = ...,
-): ...
-
-class NoHtml: ...
diff --git a/mypy-stubs/graphviz/tools.pyi b/mypy-stubs/graphviz/tools.pyi
deleted file mode 100644
index abad3ca9b..000000000
--- a/mypy-stubs/graphviz/tools.pyi
+++ /dev/null
@@ -1,9 +0,0 @@
-# Stubs for graphviz.tools (Python 3.5)
-#
-# NOTE: This dynamically typed stub was automatically generated by stubgen.
-
-from typing import Any
-
-def attach(object, name): ...
-def mkdirs(filename, mode: int = ...): ...
-def mapping_items(mapping, _iteritems: Any = ...): ...
diff --git a/mypy-stubs/importlib_metadata/__init__.pyi b/mypy-stubs/importlib_metadata/__init__.pyi
deleted file mode 100644
index ea406deac..000000000
--- a/mypy-stubs/importlib_metadata/__init__.pyi
+++ /dev/null
@@ -1,162 +0,0 @@
-import abc
-import pathlib
-import sys
-from collections.abc import Iterable, Mapping
-from email.message import Message
-from importlib.abc import MetaPathFinder
-from os import PathLike
-from pathlib import Path
-from re import Pattern
-from typing import Any, ClassVar, NamedTuple, overload
-
-from _typeshed import StrPath
-from typing_extensions import Self
-
-__all__ = [
- "Distribution",
- "DistributionFinder",
- "PackageNotFoundError",
- "distribution",
- "distributions",
- "entry_points",
- "files",
- "metadata",
- "requires",
- "version",
-]
-
-if sys.version_info >= (3, 10):
- __all__ += ["PackageMetadata", "packages_distributions"]
-
-if sys.version_info >= (3, 10):
- from importlib.metadata._meta import PackageMetadata as PackageMetadata
- def packages_distributions() -> Mapping[str, list[str]]: ...
-
-class PackageNotFoundError(ModuleNotFoundError):
- @property
- def name(self) -> str: ... # type: ignore[override]
-
-class _EntryPointBase(NamedTuple):
- name: str
- value: str
- group: str
-
-class EntryPoint(_EntryPointBase):
- pattern: ClassVar[Pattern[str]]
- if sys.version_info >= (3, 11):
- def __init__(self, name: str, value: str, group: str) -> None: ...
-
- def load(self) -> Any: ... # Callable[[], Any] or an importable module
- @property
- def extras(self) -> list[str]: ...
- if sys.version_info >= (3, 9):
- @property
- def module(self) -> str: ...
- @property
- def attr(self) -> str: ...
- if sys.version_info >= (3, 10):
- dist: ClassVar[Distribution | None]
- def matches(
- self,
- *,
- name: str = ...,
- value: str = ...,
- group: str = ...,
- module: str = ...,
- attr: str = ...,
- extras: list[str] = ...,
- ) -> bool: ... # undocumented
-
-class PackagePath(pathlib.PurePosixPath):
- def read_text(self, encoding: str = "utf-8") -> str: ...
- def read_binary(self) -> bytes: ...
- def locate(self) -> PathLike[str]: ...
- # The following attributes are not defined on PackagePath, but are dynamically added by Distribution.files:
- hash: FileHash | None
- size: int | None
- dist: Distribution
-
-class FileHash:
- mode: str
- value: str
- def __init__(self, spec: str) -> None: ...
-
-class Distribution:
- @abc.abstractmethod
- def read_text(self, filename: str) -> str | None: ...
- @abc.abstractmethod
- def locate_file(self, path: StrPath) -> PathLike[str]: ...
- @classmethod
- def from_name(cls, name: str) -> Distribution: ...
- @overload
- @classmethod
- def discover(cls, *, context: DistributionFinder.Context) -> Iterable[Distribution]: ...
- @overload
- @classmethod
- def discover(
- cls, *, context: None = None, name: str | None = ..., path: list[str] = ..., **kwargs: Any
- ) -> Iterable[Distribution]: ...
- @staticmethod
- def at(path: StrPath) -> PathDistribution: ...
-
- if sys.version_info >= (3, 10):
- @property
- def metadata(self) -> PackageMetadata: ...
- @property
- def entry_points(self) -> EntryPoints: ...
- else:
- @property
- def metadata(self) -> Message: ...
- @property
- def entry_points(self) -> list[EntryPoint]: ...
-
- @property
- def version(self) -> str: ...
- @property
- def files(self) -> list[PackagePath] | None: ...
- @property
- def requires(self) -> list[str] | None: ...
- if sys.version_info >= (3, 10):
- @property
- def name(self) -> str: ...
-
-class DistributionFinder(MetaPathFinder):
- class Context:
- name: str | None
- def __init__(
- self, *, name: str | None = ..., path: list[str] = ..., **kwargs: Any
- ) -> None: ...
- @property
- def path(self) -> list[str]: ...
-
- @abc.abstractmethod
- def find_distributions(
- self, context: DistributionFinder.Context = ...
- ) -> Iterable[Distribution]: ...
-
-class MetadataPathFinder(DistributionFinder):
- @classmethod
- def find_distributions(
- cls, context: DistributionFinder.Context = ...
- ) -> Iterable[PathDistribution]: ...
- if sys.version_info >= (3, 10):
- # Yes, this is an instance method that has argumend named "cls"
- def invalidate_caches(cls) -> None: ...
-
-class PathDistribution(Distribution):
- def __init__(self, path: Path) -> None: ...
- def read_text(self, filename: StrPath) -> str: ...
- def locate_file(self, path: StrPath) -> PathLike[str]: ...
-
-def distribution(distribution_name: str) -> Distribution: ...
-@overload
-def distributions(*, context: DistributionFinder.Context) -> Iterable[Distribution]: ...
-@overload
-def distributions(
- *, context: None = None, name: str | None = ..., path: list[str] = ..., **kwargs: Any
-) -> Iterable[Distribution]: ...
-def metadata(distribution_name: str) -> Message: ...
-def entry_points() -> dict[str, list[EntryPoint]]: ...
-def version(distribution_name: str) -> str: ...
-def files(distribution_name: str) -> list[PackagePath] | None: ...
-def requires(distribution_name: str) -> list[str] | None: ...
diff --git a/mypy-stubs/importlib_metadata/_meta.pyi b/mypy-stubs/importlib_metadata/_meta.pyi
deleted file mode 100644
index e3504fe40..000000000
--- a/mypy-stubs/importlib_metadata/_meta.pyi
+++ /dev/null
@@ -1,22 +0,0 @@
-from collections.abc import Iterator
-from typing import Any, Protocol, TypeVar
-
-_T = TypeVar("_T")
-
-class PackageMetadata(Protocol):
- def __len__(self) -> int: ...
- def __contains__(self, item: str) -> bool: ...
- def __getitem__(self, key: str) -> str: ...
- def __iter__(self) -> Iterator[str]: ...
- def get_all(self, name: str, failobj: _T = ...) -> list[Any] | _T: ...
- @property
- def json(self) -> dict[str, str | list[str]]: ...
-
-class SimplePath(Protocol):
- def joinpath(self) -> SimplePath: ...
- def parent(self) -> SimplePath: ...
- def read_text(self) -> str: ...
- # There was a bug in `SimplePath` definition in cpython, see #8451
- # Strictly speaking `__div__` was defined in 3.10, not __truediv__,
- # but it should have always been `__truediv__`.
- def __truediv__(self) -> SimplePath: ...
diff --git a/mypy-stubs/prov/model.pyi b/mypy-stubs/prov/model.pyi
index ee2688a4d..19a13bcf0 100644
--- a/mypy-stubs/prov/model.pyi
+++ b/mypy-stubs/prov/model.pyi
@@ -223,10 +223,9 @@ class ProvBundle:
) -> QualifiedName | None: ...
def get_records(
self,
- class_or_type_or_tuple: type
- | type[int | str]
- | Tuple[type | type[int | str] | Tuple[Any, ...], ...]
- | None = ...,
+ class_or_type_or_tuple: (
+ type | type[int | str] | Tuple[type | type[int | str] | Tuple[Any, ...], ...] | None
+ ) = ...,
) -> List[ProvRecord]: ...
def get_record(self, identifier: Identifier | None) -> ProvRecord | List[ProvRecord] | None: ...
def is_document(self) -> bool: ...
diff --git a/mypy-stubs/spython/main/__init__.pyi b/mypy-stubs/spython/main/__init__.pyi
new file mode 100644
index 000000000..adced1f3a
--- /dev/null
+++ b/mypy-stubs/spython/main/__init__.pyi
@@ -0,0 +1,9 @@
+from typing import Iterator, Optional
+
+from .base import Client as _BaseClient
+from .build import build as base_build
+
+class _Client(_BaseClient):
+ build = base_build
+
+Client = _Client()
diff --git a/mypy-stubs/spython/main/base/__init__.pyi b/mypy-stubs/spython/main/base/__init__.pyi
new file mode 100644
index 000000000..111997914
--- /dev/null
+++ b/mypy-stubs/spython/main/base/__init__.pyi
@@ -0,0 +1,3 @@
+class Client:
+ def __init__(self) -> None: ...
+ def version(self) -> str: ...
diff --git a/mypy-stubs/spython/main/build.pyi b/mypy-stubs/spython/main/build.pyi
new file mode 100644
index 000000000..098ba3436
--- /dev/null
+++ b/mypy-stubs/spython/main/build.pyi
@@ -0,0 +1,23 @@
+from typing import Iterator, Optional
+
+from .base import Client
+
+def build(
+ self: Client,
+ recipe: Optional[str] = ...,
+ image: Optional[str] = ...,
+ isolated: Optional[bool] = ...,
+ sandbox: Optional[bool] = ...,
+ writable: Optional[bool] = ...,
+ build_folder: Optional[str] = ...,
+ robot_name: Optional[bool] = ...,
+ ext: Optional[str] = ...,
+ sudo: Optional[bool] = ...,
+ stream: Optional[bool] = ...,
+ force: Optional[bool] = ...,
+    options: Optional[list[str]] = ...,
+ quiet: Optional[bool] = ...,
+ return_result: Optional[bool] = ...,
+ sudo_options: Optional[str | list[str]] = ...,
+ singularity_options: Optional[list[str]] = ...,
+) -> tuple[str, Iterator[str]]: ...
diff --git a/mypy-stubs/spython/main/parse/parsers/base.pyi b/mypy-stubs/spython/main/parse/parsers/base.pyi
new file mode 100644
index 000000000..23eef9975
--- /dev/null
+++ b/mypy-stubs/spython/main/parse/parsers/base.pyi
@@ -0,0 +1,14 @@
+import abc
+
+from ..recipe import Recipe
+
+class ParserBase(metaclass=abc.ABCMeta):
+ filename: str
+ lines: list[str]
+ args: dict[str, str]
+ active_layer: str
+ active_layer_num: int
+ recipe: dict[str, Recipe]
+ def __init__(self, filename: str, load: bool = ...) -> None: ...
+ @abc.abstractmethod
+ def parse(self) -> dict[str, Recipe]: ...
diff --git a/mypy-stubs/spython/main/parse/parsers/docker.pyi b/mypy-stubs/spython/main/parse/parsers/docker.pyi
new file mode 100644
index 000000000..8adb8a547
--- /dev/null
+++ b/mypy-stubs/spython/main/parse/parsers/docker.pyi
@@ -0,0 +1,7 @@
+from ..recipe import Recipe
+from .base import ParserBase as ParserBase
+
+class DockerParser(ParserBase):
+ name: str
+ def __init__(self, filename: str = ..., load: bool = ...) -> None: ...
+ def parse(self) -> dict[str, Recipe]: ...
diff --git a/mypy-stubs/spython/main/parse/recipe.pyi b/mypy-stubs/spython/main/parse/recipe.pyi
new file mode 100644
index 000000000..dabd4ebc5
--- /dev/null
+++ b/mypy-stubs/spython/main/parse/recipe.pyi
@@ -0,0 +1,19 @@
+from typing import Optional
+
+class Recipe:
+ cmd: Optional[str]
+ comments: list[str]
+ entrypoint: Optional[str]
+ environ: list[str]
+ files: list[str]
+ layer_files: dict[str, str]
+ install: list[str]
+ labels: list[str]
+ ports: list[str]
+ test: Optional[str]
+ volumes: list[str]
+ workdir: Optional[str]
+ layer: int
+ fromHeader: Optional[str]
+ source: Optional[Recipe]
+ def __init__(self, recipe: Optional[Recipe] = ..., layer: int = ...) -> None: ...
diff --git a/mypy-stubs/spython/main/parse/writers/base.pyi b/mypy-stubs/spython/main/parse/writers/base.pyi
new file mode 100644
index 000000000..3b4fe12da
--- /dev/null
+++ b/mypy-stubs/spython/main/parse/writers/base.pyi
@@ -0,0 +1,6 @@
+from ..recipe import Recipe
+
+class WriterBase:
+ recipe: dict[str, Recipe]
+ def __init__(self, recipe: dict[str, Recipe] | None = ...) -> None: ...
+ def write(self, output_file: str | None = ..., force: bool = ...) -> None: ...
diff --git a/mypy-stubs/spython/main/parse/writers/singularity.pyi b/mypy-stubs/spython/main/parse/writers/singularity.pyi
new file mode 100644
index 000000000..c80198461
--- /dev/null
+++ b/mypy-stubs/spython/main/parse/writers/singularity.pyi
@@ -0,0 +1,10 @@
+from typing import Optional
+
+from ..recipe import Recipe
+from .base import WriterBase as WriterBase
+
+class SingularityWriter(WriterBase):
+ name: str
+ def __init__(self, recipe: Optional[dict[str, Recipe]] = ...) -> None: ...
+ def validate(self) -> None: ...
+ def convert(self, runscript: str = ..., force: bool = ...) -> str: ...
diff --git a/mypy-stubs/subprocess.pyi b/mypy-stubs/subprocess.pyi
deleted file mode 100644
index 4af9814fa..000000000
--- a/mypy-stubs/subprocess.pyi
+++ /dev/null
@@ -1,1096 +0,0 @@
-import sys
-from types import TracebackType
-from typing import (
- IO,
- Any,
- AnyStr,
- Callable,
- Generic,
- Iterable,
- Mapping,
- Sequence,
- Tuple,
- Type,
- TypeVar,
- Union,
- overload,
-)
-
-from _typeshed import Self, StrOrBytesPath
-from typing_extensions import Literal
-
-if sys.version_info >= (3, 9):
- from types import GenericAlias
-
-# We prefer to annotate inputs to methods (eg subprocess.check_call) with these
-# union types.
-# For outputs we use laborious literal based overloads to try to determine
-# which specific return types to use, and prefer to fall back to Any when
-# this does not work, so the caller does not have to use an assertion to confirm
-# which type.
-#
-# For example:
-#
-# try:
-# x = subprocess.check_output(["ls", "-l"])
-# reveal_type(x) # bytes, based on the overloads
-# except TimeoutError as e:
-# reveal_type(e.cmd) # Any, but morally is _CMD
-_FILE = Union[None, int, IO[Any]]
-_TXT = Union[bytes, str]
-if sys.version_info >= (3, 8):
- _CMD = Union[StrOrBytesPath, Sequence[StrOrBytesPath]]
-else:
- # Python 3.6 doesn't support _CMD being a single PathLike.
- # See: https://bugs.python.org/issue31961
- _CMD = Union[_TXT, Sequence[StrOrBytesPath]]
-if sys.platform == "win32":
- _ENV = Mapping[str, str]
-else:
- _ENV = Union[Mapping[bytes, StrOrBytesPath], Mapping[str, StrOrBytesPath]]
-
-_T = TypeVar("_T")
-
-class CompletedProcess(Generic[_T]):
- # morally: _CMD
- args: Any
- returncode: int | None # this optional is REQUIRED for mypyc
- # These can both be None, but requiring checks for None would be tedious
- # and writing all the overloads would be horrific.
- stdout: _T
- stderr: _T
- def __init__(
- self,
- args: _CMD,
- returncode: int,
- stdout: _T | None = ...,
- stderr: _T | None = ...,
- ) -> None: ...
- def check_returncode(self) -> None: ...
- if sys.version_info >= (3, 9):
- def __class_getitem__(cls, item: Any) -> GenericAlias: ...
-
-if sys.version_info >= (3, 7):
- # Nearly the same args as for 3.6, except for capture_output and text
- @overload
- def run(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE = ...,
- stdout: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: bool = ...,
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- capture_output: bool = ...,
- check: bool = ...,
- encoding: str | None = ...,
- errors: str | None = ...,
- input: str | None = ...,
- text: Literal[True],
- timeout: float | None = ...,
- ) -> CompletedProcess[str]: ...
- @overload
- def run(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE = ...,
- stdout: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: bool = ...,
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- capture_output: bool = ...,
- check: bool = ...,
- encoding: str,
- errors: str | None = ...,
- input: str | None = ...,
- text: bool | None = ...,
- timeout: float | None = ...,
- ) -> CompletedProcess[str]: ...
- @overload
- def run(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE = ...,
- stdout: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: bool = ...,
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- capture_output: bool = ...,
- check: bool = ...,
- encoding: str | None = ...,
- errors: str,
- input: str | None = ...,
- text: bool | None = ...,
- timeout: float | None = ...,
- ) -> CompletedProcess[str]: ...
- @overload
- def run(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE = ...,
- stdout: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- *,
- universal_newlines: Literal[True],
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- # where the *real* keyword only args start
- capture_output: bool = ...,
- check: bool = ...,
- encoding: str | None = ...,
- errors: str | None = ...,
- input: str | None = ...,
- text: bool | None = ...,
- timeout: float | None = ...,
- ) -> CompletedProcess[str]: ...
- @overload
- def run(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE = ...,
- stdout: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: Literal[False] = ...,
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- capture_output: bool = ...,
- check: bool = ...,
- encoding: None = ...,
- errors: None = ...,
- input: bytes | None = ...,
- text: Literal[None, False] = ...,
- timeout: float | None = ...,
- ) -> CompletedProcess[bytes]: ...
- @overload
- def run(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE = ...,
- stdout: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: bool = ...,
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- capture_output: bool = ...,
- check: bool = ...,
- encoding: str | None = ...,
- errors: str | None = ...,
- input: _TXT | None = ...,
- text: bool | None = ...,
- timeout: float | None = ...,
- ) -> CompletedProcess[Any]: ...
-
-else:
- # Nearly same args as Popen.__init__ except for timeout, input, and check
- @overload
- def run(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE = ...,
- stdout: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: bool = ...,
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- check: bool = ...,
- encoding: str,
- errors: str | None = ...,
- input: str | None = ...,
- timeout: float | None = ...,
- ) -> CompletedProcess[str]: ...
- @overload
- def run(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE = ...,
- stdout: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: bool = ...,
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- check: bool = ...,
- encoding: str | None = ...,
- errors: str,
- input: str | None = ...,
- timeout: float | None = ...,
- ) -> CompletedProcess[str]: ...
- @overload
- def run(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE = ...,
- stdout: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- *,
- universal_newlines: Literal[True],
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- # where the *real* keyword only args start
- check: bool = ...,
- encoding: str | None = ...,
- errors: str | None = ...,
- input: str | None = ...,
- timeout: float | None = ...,
- ) -> CompletedProcess[str]: ...
- @overload
- def run(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE = ...,
- stdout: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: Literal[False] = ...,
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- check: bool = ...,
- encoding: None = ...,
- errors: None = ...,
- input: bytes | None = ...,
- timeout: float | None = ...,
- ) -> CompletedProcess[bytes]: ...
- @overload
- def run(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE = ...,
- stdout: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: bool = ...,
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- check: bool = ...,
- encoding: str | None = ...,
- errors: str | None = ...,
- input: _TXT | None = ...,
- timeout: float | None = ...,
- ) -> CompletedProcess[Any]: ...
-
-# Same args as Popen.__init__
-def call(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE = ...,
- stdout: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: bool = ...,
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- timeout: float | None = ...,
-) -> int: ...
-
-# Same args as Popen.__init__
-def check_call(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath = ...,
- stdin: _FILE = ...,
- stdout: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: bool = ...,
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- timeout: float | None = ...,
-) -> int: ...
-
-if sys.version_info >= (3, 7):
- # 3.7 added text
- @overload
- def check_output(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: bool = ...,
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- timeout: float | None = ...,
- input: _TXT | None = ...,
- encoding: str | None = ...,
- errors: str | None = ...,
- text: Literal[True],
- ) -> str: ...
- @overload
- def check_output(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: bool = ...,
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- timeout: float | None = ...,
- input: _TXT | None = ...,
- encoding: str,
- errors: str | None = ...,
- text: bool | None = ...,
- ) -> str: ...
- @overload
- def check_output(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: bool = ...,
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- timeout: float | None = ...,
- input: _TXT | None = ...,
- encoding: str | None = ...,
- errors: str,
- text: bool | None = ...,
- ) -> str: ...
- @overload
- def check_output(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- *,
- universal_newlines: Literal[True],
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- # where the real keyword only ones start
- timeout: float | None = ...,
- input: _TXT | None = ...,
- encoding: str | None = ...,
- errors: str | None = ...,
- text: bool | None = ...,
- ) -> str: ...
- @overload
- def check_output(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: Literal[False] = ...,
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- timeout: float | None = ...,
- input: _TXT | None = ...,
- encoding: None = ...,
- errors: None = ...,
- text: Literal[None, False] = ...,
- ) -> bytes: ...
- @overload
- def check_output(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: bool = ...,
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- timeout: float | None = ...,
- input: _TXT | None = ...,
- encoding: str | None = ...,
- errors: str | None = ...,
- text: bool | None = ...,
- ) -> Any: ... # morally: -> _TXT
-
-else:
- @overload
- def check_output(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: bool = ...,
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- timeout: float | None = ...,
- input: _TXT | None = ...,
- encoding: str,
- errors: str | None = ...,
- ) -> str: ...
- @overload
- def check_output(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: bool = ...,
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- timeout: float | None = ...,
- input: _TXT | None = ...,
- encoding: str | None = ...,
- errors: str,
- ) -> str: ...
- @overload
- def check_output(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- universal_newlines: Literal[True],
- timeout: float | None = ...,
- input: _TXT | None = ...,
- encoding: str | None = ...,
- errors: str | None = ...,
- ) -> str: ...
- @overload
- def check_output(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: Literal[False] = ...,
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- timeout: float | None = ...,
- input: _TXT | None = ...,
- encoding: None = ...,
- errors: None = ...,
- ) -> bytes: ...
- @overload
- def check_output(
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE = ...,
- stderr: _FILE = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: bool = ...,
- startupinfo: Any = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- timeout: float | None = ...,
- input: _TXT | None = ...,
- encoding: str | None = ...,
- errors: str | None = ...,
- ) -> Any: ... # morally: -> _TXT
-
-PIPE: int
-STDOUT: int
-DEVNULL: int
-
-class SubprocessError(Exception): ...
-
-class TimeoutExpired(SubprocessError):
- def __init__(
- self,
- cmd: _CMD,
- timeout: float,
- output: _TXT | None = ...,
- stderr: _TXT | None = ...,
- ) -> None: ...
- # morally: _CMD
- cmd: Any
- timeout: float
- # morally: _TXT | None
- output: Any
- stdout: Any
- stderr: Any
-
-class CalledProcessError(SubprocessError):
- returncode: int | None # this optional is REQUIRED for mypyc
- # morally: _CMD
- cmd: Any
- # morally: _TXT | None
- output: Any
-
- # morally: _TXT | None
- stdout: Any
- stderr: Any
- def __init__(
- self,
- returncode: int,
- cmd: _CMD,
- output: _TXT | None = ...,
- stderr: _TXT | None = ...,
- ) -> None: ...
-
-class Popen(Generic[AnyStr]):
- args: _CMD
- stdin: IO[AnyStr] | None
- stdout: IO[AnyStr] | None
- stderr: IO[AnyStr] | None
- pid: int
- returncode: int | None # this optional is REQUIRED for mypyc
- universal_newlines: bool
-
- # Technically it is wrong that Popen provides __new__ instead of __init__
- # but this shouldn't come up hopefully?
-
- if sys.version_info >= (3, 7):
- # text is added in 3.7
- @overload
- def __new__(
- cls,
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE | None = ...,
- stdout: _FILE | None = ...,
- stderr: _FILE | None = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: bool = ...,
- startupinfo: Any | None = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- text: bool | None = ...,
- encoding: str,
- errors: str | None = ...,
- ) -> Popen[str]: ...
- @overload
- def __new__(
- cls,
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE | None = ...,
- stdout: _FILE | None = ...,
- stderr: _FILE | None = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: bool = ...,
- startupinfo: Any | None = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- text: bool | None = ...,
- encoding: str | None = ...,
- errors: str,
- ) -> Popen[str]: ...
- @overload
- def __new__(
- cls,
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE | None = ...,
- stdout: _FILE | None = ...,
- stderr: _FILE | None = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- *,
- universal_newlines: Literal[True],
- startupinfo: Any | None = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- # where the *real* keyword only args start
- text: bool | None = ...,
- encoding: str | None = ...,
- errors: str | None = ...,
- ) -> Popen[str]: ...
- @overload
- def __new__(
- cls,
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE | None = ...,
- stdout: _FILE | None = ...,
- stderr: _FILE | None = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: bool = ...,
- startupinfo: Any | None = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- text: Literal[True],
- encoding: str | None = ...,
- errors: str | None = ...,
- ) -> Popen[str]: ...
- @overload
- def __new__(
- cls,
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE | None = ...,
- stdout: _FILE | None = ...,
- stderr: _FILE | None = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: Literal[False] = ...,
- startupinfo: Any | None = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- text: Literal[None, False] = ...,
- encoding: None = ...,
- errors: None = ...,
- ) -> Popen[bytes]: ...
- @overload
- def __new__(
- cls,
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE | None = ...,
- stdout: _FILE | None = ...,
- stderr: _FILE | None = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: bool = ...,
- startupinfo: Any | None = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- text: bool | None = ...,
- encoding: str | None = ...,
- errors: str | None = ...,
- ) -> Popen[Any]: ...
- else:
- @overload
- def __new__(
- cls,
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE | None = ...,
- stdout: _FILE | None = ...,
- stderr: _FILE | None = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: bool = ...,
- startupinfo: Any | None = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- encoding: str,
- errors: str | None = ...,
- ) -> Popen[str]: ...
- @overload
- def __new__(
- cls,
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE | None = ...,
- stdout: _FILE | None = ...,
- stderr: _FILE | None = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: bool = ...,
- startupinfo: Any | None = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- encoding: str | None = ...,
- errors: str,
- ) -> Popen[str]: ...
- @overload
- def __new__(
- cls,
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE | None = ...,
- stdout: _FILE | None = ...,
- stderr: _FILE | None = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- *,
- universal_newlines: Literal[True],
- startupinfo: Any | None = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- # where the *real* keyword only args start
- encoding: str | None = ...,
- errors: str | None = ...,
- ) -> Popen[str]: ...
- @overload
- def __new__(
- cls,
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE | None = ...,
- stdout: _FILE | None = ...,
- stderr: _FILE | None = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: Literal[False] = ...,
- startupinfo: Any | None = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- encoding: None = ...,
- errors: None = ...,
- ) -> Popen[bytes]: ...
- @overload
- def __new__(
- cls,
- args: _CMD,
- bufsize: int = ...,
- executable: StrOrBytesPath | None = ...,
- stdin: _FILE | None = ...,
- stdout: _FILE | None = ...,
- stderr: _FILE | None = ...,
- preexec_fn: Callable[[], Any] | None = ...,
- close_fds: bool = ...,
- shell: bool = ...,
- cwd: StrOrBytesPath | None = ...,
- env: _ENV | None = ...,
- universal_newlines: bool = ...,
- startupinfo: Any | None = ...,
- creationflags: int = ...,
- restore_signals: bool = ...,
- start_new_session: bool = ...,
- pass_fds: Any = ...,
- *,
- encoding: str | None = ...,
- errors: str | None = ...,
- ) -> Popen[Any]: ...
-
- def poll(self) -> int | None: ...
- if sys.version_info >= (3, 7):
- def wait(self, timeout: float | None = ...) -> int: ...
- else:
- def wait(self, timeout: float | None = ..., endtime: float | None = ...) -> int: ...
- # Return str/bytes
- def communicate(
- self,
- input: AnyStr | None = ...,
- timeout: float | None = ...,
- # morally this should be optional
- ) -> Tuple[AnyStr, AnyStr]: ...
- def send_signal(self, sig: int) -> None: ...
- def terminate(self) -> None: ...
- def kill(self) -> None: ...
- def __enter__(self: Self) -> Self: ...
- def __exit__(
- self,
- type: Type[BaseException] | None,
- value: BaseException | None,
- traceback: TracebackType | None,
- ) -> None: ...
- if sys.version_info >= (3, 9):
- def __class_getitem__(cls, item: Any) -> GenericAlias: ...
-
-# The result really is always a str.
-def getstatusoutput(cmd: _TXT) -> Tuple[int, str]: ...
-def getoutput(cmd: _TXT) -> str: ...
-def list2cmdline(seq: Iterable[str]) -> str: ... # undocumented
-
-if sys.platform == "win32":
- class STARTUPINFO:
- if sys.version_info >= (3, 7):
- def __init__(
- self,
- *,
- dwFlags: int = ...,
- hStdInput: Any | None = ...,
- hStdOutput: Any | None = ...,
- hStdError: Any | None = ...,
- wShowWindow: int = ...,
- lpAttributeList: Mapping[str, Any] | None = ...,
- ) -> None: ...
- dwFlags: int
- hStdInput: Any | None
- hStdOutput: Any | None
- hStdError: Any | None
- wShowWindow: int
- if sys.version_info >= (3, 7):
- lpAttributeList: Mapping[str, Any]
- STD_INPUT_HANDLE: Any
- STD_OUTPUT_HANDLE: Any
- STD_ERROR_HANDLE: Any
- SW_HIDE: int
- STARTF_USESTDHANDLES: int
- STARTF_USESHOWWINDOW: int
- CREATE_NEW_CONSOLE: int
- CREATE_NEW_PROCESS_GROUP: int
- if sys.version_info >= (3, 7):
- ABOVE_NORMAL_PRIORITY_CLASS: int
- BELOW_NORMAL_PRIORITY_CLASS: int
- HIGH_PRIORITY_CLASS: int
- IDLE_PRIORITY_CLASS: int
- NORMAL_PRIORITY_CLASS: int
- REALTIME_PRIORITY_CLASS: int
- CREATE_NO_WINDOW: int
- DETACHED_PROCESS: int
- CREATE_DEFAULT_ERROR_MODE: int
- CREATE_BREAKAWAY_FROM_JOB: int
diff --git a/mypy-stubs/urllib/__init__.py b/mypy-stubs/urllib/__init__.py
deleted file mode 100644
index e69de29bb..000000000
diff --git a/mypy-stubs/urllib/parse.pyi b/mypy-stubs/urllib/parse.pyi
deleted file mode 100644
index 81b2db34e..000000000
--- a/mypy-stubs/urllib/parse.pyi
+++ /dev/null
@@ -1,190 +0,0 @@
-# Stubs for urllib.parse
-import sys
-from typing import (
- Any,
- AnyStr,
- Callable,
- Dict,
- Generic,
- Iterator,
- List,
- Mapping,
- NamedTuple,
- Optional,
- Sequence,
- Tuple,
- Union,
- overload,
-)
-
-_Str = Union[bytes, str]
-
-uses_relative: List[str]
-uses_netloc: List[str]
-uses_params: List[str]
-non_hierarchical: List[str]
-uses_query: List[str]
-uses_fragment: List[str]
-scheme_chars: str
-MAX_CACHE_SIZE = 0
-
-class _ResultMixinBase(Generic[AnyStr]):
- def geturl(self) -> AnyStr: ...
-
-class _ResultMixinStr(_ResultMixinBase[str]):
- def encode(self, encoding: str = ..., errors: str = ...) -> _ResultMixinBytes: ...
-
-class _ResultMixinBytes(_ResultMixinBase[str]):
- def decode(self, encoding: str = ..., errors: str = ...) -> _ResultMixinStr: ...
-
-class _NetlocResultMixinBase(Generic[AnyStr]):
- username: AnyStr
- password: AnyStr
- hostname: AnyStr
- port: int
-
-class _NetlocResultMixinStr(_NetlocResultMixinBase[str], _ResultMixinStr): ...
-class _NetlocResultMixinBytes(_NetlocResultMixinBase[bytes], _ResultMixinBytes): ...
-
-class _DefragResultBase(Generic[AnyStr]):
- url: AnyStr
- fragment: AnyStr
- @overload
- def __getitem__(self, x: slice) -> AnyStr: ...
- @overload
- def __getitem__(self, x: int) -> AnyStr: ...
- def __iter__(self) -> Iterator[AnyStr]: ...
-
-_SplitResultBase = NamedTuple(
- "_SplitResultBase",
- [
- ("scheme", str),
- ("netloc", str),
- ("path", str),
- ("query", str),
- ("fragment", str),
- ],
-)
-_SplitResultBytesBase = NamedTuple(
- "_SplitResultBytesBase",
- [
- ("scheme", bytes),
- ("netloc", bytes),
- ("path", bytes),
- ("query", bytes),
- ("fragment", bytes),
- ],
-)
-
-_ParseResultBase = NamedTuple(
- "_ParseResultBase",
- [
- ("scheme", str),
- ("netloc", str),
- ("path", str),
- ("params", str),
- ("query", str),
- ("fragment", str),
- ],
-)
-_ParseResultBytesBase = NamedTuple(
- "_ParseResultBytesBase",
- [
- ("scheme", bytes),
- ("netloc", bytes),
- ("path", bytes),
- ("params", bytes),
- ("query", bytes),
- ("fragment", bytes),
- ],
-)
-
-# Structured result objects for string data
-class DefragResult(_DefragResultBase[str], _ResultMixinStr): ...
-class SplitResult(_SplitResultBase, _NetlocResultMixinStr): ...
-class ParseResult(_ParseResultBase, _NetlocResultMixinStr): ...
-
-# Structured result objects for bytes data
-class DefragResultBytes(_DefragResultBase[bytes], _ResultMixinBytes): ...
-class SplitResultBytes(_SplitResultBytesBase, _NetlocResultMixinBytes): ...
-class ParseResultBytes(_ParseResultBytesBase, _NetlocResultMixinBytes): ...
-
-def parse_qs(
- qs: AnyStr,
- keep_blank_values: bool = ...,
- strict_parsing: bool = ...,
- encoding: str = ...,
- errors: str = ...,
-) -> Dict[AnyStr, List[AnyStr]]: ...
-def parse_qsl(
- qs: AnyStr,
- keep_blank_values: bool = ...,
- strict_parsing: bool = ...,
- encoding: str = ...,
- errors: str = ...,
-) -> List[Tuple[AnyStr, AnyStr]]: ...
-@overload
-def quote(string: str, safe: _Str = ..., encoding: str = ..., errors: str = ...) -> str: ...
-@overload
-def quote(string: bytes, safe: _Str = ...) -> str: ...
-def quote_from_bytes(bs: bytes, safe: _Str = ...) -> str: ...
-@overload
-def quote_plus(string: str, safe: _Str = ..., encoding: str = ..., errors: str = ...) -> str: ...
-@overload
-def quote_plus(string: bytes, safe: _Str = ...) -> str: ...
-def unquote(string: str, encoding: str = ..., errors: str = ...) -> str: ...
-def unquote_to_bytes(string: _Str) -> bytes: ...
-def unquote_plus(string: str, encoding: str = ..., errors: str = ...) -> str: ...
-@overload
-def urldefrag(url: str) -> DefragResult: ...
-@overload
-def urldefrag(url: bytes) -> DefragResultBytes: ...
-
-if sys.version_info >= (3, 5):
- def urlencode(
- query: Union[
- Mapping[Any, Any],
- Mapping[Any, Sequence[Any]],
- Sequence[Tuple[Any, Any]],
- Sequence[Tuple[Any, Sequence[Any]]],
- ],
- doseq: bool = ...,
- safe: AnyStr = ...,
- encoding: str = ...,
- errors: str = ...,
- quote_via: Callable[[str, AnyStr, str, str], str] = ...,
- ) -> str: ...
-
-else:
- def urlencode(
- query: Union[
- Mapping[Any, Any],
- Mapping[Any, Sequence[Any]],
- Sequence[Tuple[Any, Any]],
- Sequence[Tuple[Any, Sequence[Any]]],
- ],
- doseq: bool = ...,
- safe: AnyStr = ...,
- encoding: str = ...,
- errors: str = ...,
- ) -> str: ...
-
-def urljoin(
- base: Optional[AnyStr], url: Optional[AnyStr], allow_fragments: bool = ...
-) -> AnyStr: ...
-@overload
-def urlparse(url: str, scheme: str = ..., allow_fragments: bool = ...) -> ParseResult: ...
-@overload
-def urlparse(url: bytes, scheme: bytes = ..., allow_fragments: bool = ...) -> ParseResultBytes: ...
-@overload
-def urlsplit(url: Optional[str], scheme: str = ..., allow_fragments: bool = ...) -> SplitResult: ...
-@overload
-def urlsplit(url: bytes, scheme: bytes = ..., allow_fragments: bool = ...) -> SplitResultBytes: ...
-@overload
-def urlunparse(components: Tuple[AnyStr, AnyStr, AnyStr, AnyStr, AnyStr, AnyStr]) -> AnyStr: ...
-@overload
-def urlunparse(components: Sequence[AnyStr]) -> AnyStr: ...
-@overload
-def urlunsplit(components: Tuple[AnyStr, AnyStr, AnyStr, AnyStr, AnyStr]) -> AnyStr: ...
-@overload
-def urlunsplit(components: Sequence[AnyStr]) -> AnyStr: ...
diff --git a/pyproject.toml b/pyproject.toml
index f8526fab8..09789e45a 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,18 +1,22 @@
[build-system]
requires = [
"setuptools>=45",
- 'mypy==0.971; python_version == "3.6"', # last version for Python 3.6
- 'mypy==1.3.0; python_version >= "3.7"', # also update mypy-requirements.txt
+ "setuptools_scm[toml]>=8.0.4,<9",
+ "mypy==1.9.0", # also update mypy-requirements.txt
"types-requests",
"types-psutil",
- "importlib_resources>=1.4", # equivalent to Python 3.9
+ "importlib_resources>=1.4;python_version<'3.9'",
"ruamel.yaml>=0.16.0,<0.18",
- "schema-salad>=8.2.20211104054942,<9",
- "cwl-utils >=0.19",
- 'toml',
+ "schema-salad>=8.4.20230426093816,<9",
+ "cwl-utils>=0.32",
+ "toml",
+ "argcomplete>=1.12.0",
]
build-backend = "setuptools.build_meta"
+[tool.setuptools_scm]
+write_to = "cwltool/_version.py"
+
[tool.black]
line-length = 100
-target-version = [ "py36" ]
+target-version = [ "py38" ]
diff --git a/release-test.sh b/release-test.sh
index 257a4e631..a5e620391 100755
--- a/release-test.sh
+++ b/release-test.sh
@@ -23,10 +23,10 @@ run_tests() {
mod_loc=$(pip show ${package} |
grep ^Location | awk '{print $2}')/${module}
"${test_prefix}"bin/py.test "--ignore=${mod_loc}/schemas/" \
- --pyargs -x ${module} -n auto --dist=loadfile
+ --pyargs -x ${module} -n logical --dist=worksteal
}
-pipver=20.3.3 # minimum required version of pip for Python 3.10
-setuptoolsver=50.0.1 # fix for "AttributeError: module 'importlib.util' has no attribute 'abc'"
+pipver=23.1 # minimum required version of pip for Python 3.12
+setuptoolsver=67.6.1 # required for Python 3.12
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
rm -Rf testenv? || /bin/true
@@ -42,7 +42,7 @@ then
rm -f testenv1/lib/python-wheels/setuptools* \
&& pip install --force-reinstall -U pip==${pipver} \
&& pip install setuptools==${setuptoolsver} wheel
- pip install --no-build-isolation -rtest-requirements.txt ".${extras}"
+ pip install -rtest-requirements.txt ".${extras}"
#make test
pip uninstall -y ${package} || true; pip uninstall -y ${package} || true; make install
# mkdir testenv1/not-${module}
@@ -55,8 +55,7 @@ fi
python3 -m venv testenv2
python3 -m venv testenv3
-python3 -m venv testenv4
-rm -Rf testenv[234]/local
+rm -Rf testenv[23]/local
# Secondly we test via pip
@@ -69,7 +68,7 @@ rm -f lib/python-wheels/setuptools* \
# The following can fail if you haven't pushed your commits to ${repo}
pip install -e "git+${repo}@${HEAD}#egg=${package}${extras}"
pushd src/${package}
-pip install -rtest-requirements.txt
+pip install -rtest-requirements.txt build
make dist
#make test
cp dist/${package}*tar.gz ../../../testenv3/
@@ -89,15 +88,16 @@ rm -f lib/python-wheels/setuptools* \
&& pip install --force-reinstall -U pip==${pipver} \
&& pip install setuptools==${setuptoolsver} wheel
package_tar=$(find . -name "${package}*tar.gz")
-pip install "-r${DIR}/test-requirements.txt"
+pip install "-r${DIR}/test-requirements.txt" udocker build
pip install "${package_tar}${extras}"
+udocker install
mkdir out
tar --extract --directory=out -z -f ${package}*.tar.gz
pushd out/${package}*
make dist
make test
pip install "-r${DIR}/mypy-requirements.txt"
-make mypy
+make mypyc
pip uninstall -y ${package} || true; pip uninstall -y ${package} || true; make install
mkdir ../not-${module}
pushd ../not-${module}
diff --git a/requirements.txt b/requirements.txt
index 4c4f7298d..036c4eed6 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,19 +1,15 @@
requests>=2.6.1
-ruamel.yaml>=0.15.0,<0.18
-ruamel.yaml>=0.16;python_version>='3.10'
-rdflib>=4.2.2,<6.4
-rdflib< 6.0.0;python_version<='3.6'
+ruamel.yaml>=0.16.0,<0.19
+rdflib>=4.2.2,<7.1
shellescape>=3.4.1,<3.9
-schema-salad>=8.4,<9
+schema-salad>=8.4.20230426093816,<9
prov==1.5.1
mypy-extensions
psutil>=5.6.6
-typing-extensions
-importlib_resources>=1.4 # equivalent to Python 3.9
-importlib_metadata;python_version<'3.8' # equivalent to Python 3.9
+importlib_resources>=1.4;python_version<'3.9'
coloredlogs
pydot>=1.4.1
argcomplete>=1.12.0
-pyparsing != 3.0.2 # breaks --print-dot (pydot) https://github.com/pyparsing/pyparsing/issues/319
-pyparsing < 3;python_version<='3.6' # breaks --print-dot
-cwl-utils>=0.22
+pyparsing!=3.0.2 # breaks --print-dot (pydot) https://github.com/pyparsing/pyparsing/issues/319
+cwl-utils>=0.32
+spython>=0.3.0
diff --git a/setup.py b/setup.py
index bc95743f6..63cb6ede3 100644
--- a/setup.py
+++ b/setup.py
@@ -3,9 +3,7 @@
import os
import sys
import warnings
-from typing import Type
-import setuptools.command.egg_info as egg_info_cmd
from setuptools import setup
if os.name == "nt":
@@ -25,13 +23,6 @@
SETUP_DIR = os.path.dirname(__file__)
README = os.path.join(SETUP_DIR, "README.rst")
-try:
- import gittaggers
-
- Tagger: Type[egg_info_cmd.egg_info] = gittaggers.EggInfoFromGit
-except ImportError:
- Tagger = egg_info_cmd.egg_info
-
NEEDS_PYTEST = {"pytest", "test", "ptr"}.intersection(sys.argv)
PYTEST_RUNNER = ["pytest-runner", "pytest-cov"] if NEEDS_PYTEST else []
USE_MYPYC = False
@@ -85,7 +76,7 @@
"cwltool/workflow.py",
]
- from mypyc.build import mypycify # type: ignore[import]
+ from mypyc.build import mypycify # type: ignore[import-untyped]
opt_level = os.getenv("MYPYC_OPT_LEVEL", "3")
ext_modules = mypycify(mypyc_targets, opt_level=opt_level)
@@ -94,7 +85,6 @@
setup(
name="cwltool",
- version="3.1",
description="Common workflow language reference implementation",
long_description=open(README).read(),
long_description_content_type="text/x-rst",
@@ -105,50 +95,70 @@
ext_modules=ext_modules,
# platforms='', # empty as is conveyed by the classifier below
# license='', # empty as is conveyed by the classifier below
- packages=["cwltool", "cwltool.tests", "cwltool.cwlprov"],
+ packages=[
+ "cwltool",
+ "cwltool.cwlprov",
+ "cwltool.jshint",
+ "cwltool.rdfqueries",
+ "cwltool.schemas",
+ "cwltool.tests",
+ "cwltool.tests.checker_wf",
+ "cwltool.tests.input_deps",
+ "cwltool.tests.loop",
+ "cwltool.tests.override",
+ "cwltool.tests.reloc",
+ "cwltool.tests.subgraph",
+ "cwltool.tests.test_deps_env",
+ "cwltool.tests.test_deps_env.modulefiles",
+ "cwltool.tests.tmp1.tmp2.tmp3",
+ "cwltool.tests.tmp4.alpha",
+ "cwltool.tests.trs",
+ "cwltool.tests.wf",
+ "cwltool.tests.wf.generator",
+ "cwltool.tests.wf.indir",
+ "cwltool.tests.wf.operation",
+ ],
package_dir={"cwltool.tests": "tests"},
include_package_data=True,
install_requires=[
"setuptools",
"requests >= 2.6.1", # >= 2.6.1 to workaround
# https://github.com/ionrock/cachecontrol/issues/137
- "ruamel.yaml >= 0.15, < 0.18",
- "ruamel.yaml >= 0.16.0;python_version>='3.10'",
- "rdflib >= 4.2.2, < 6.4.0",
- "rdflib < 6.0.0;python_version<='3.6'",
+ "ruamel.yaml >= 0.16, < 0.19",
+ "rdflib >= 4.2.2, < 7.1.0",
"shellescape >= 3.4.1, < 3.9",
- "schema-salad >= 8.4, < 9",
+ "schema-salad >= 8.4.20230426093816, < 9",
"prov == 1.5.1",
"mypy-extensions",
"psutil >= 5.6.6",
- "typing-extensions",
- "importlib_resources>=1.4",
- "importlib_metadata;python_version<'3.8'",
+ "importlib_resources>=1.4;python_version<'3.9'",
"coloredlogs",
"pydot >= 1.4.1",
"argcomplete",
"pyparsing != 3.0.2", # breaks --print-dot (pydot) https://github.com/pyparsing/pyparsing/issues/319
- "pyparsing < 3 ;python_version<='3.6'", # breaks --print-dot (pydot)
- "cwl-utils >= 0.22",
+ "cwl-utils >= 0.32",
+ "spython >= 0.3.0",
],
extras_require={
- "deps": ["galaxy-tool-util >= 22.1.2, <23"],
+ "deps": [
+ "galaxy-tool-util>=22.1.2,!=23.0.1,!=23.0.2,!=23.0.3,!=23.0.4,!=23.0.5,<23.3",
+ "galaxy-util <23.2",
+ ],
},
- python_requires=">=3.6, <4",
- setup_requires=PYTEST_RUNNER,
+ python_requires=">=3.8, <4",
+ use_scm_version=True,
+ setup_requires=PYTEST_RUNNER + ["setuptools_scm>=8.0.4,<9"],
test_suite="tests",
tests_require=[
"bagit >= 1.6.4, < 1.9",
- "pytest >= 6.2, < 7.4",
+ "pytest >= 6.2, < 8.2",
"mock >= 2.0.0",
"pytest-mock >= 1.10.0",
"pytest-httpserver",
"arcp >= 0.2.0",
- "rdflib-jsonld>=0.4.0, <= 0.6.1;python_version<='3.6'",
],
entry_points={"console_scripts": ["cwltool=cwltool.main:run"]},
zip_safe=True,
- cmdclass={"egg_info": Tagger},
classifiers=[
"Development Status :: 5 - Production/Stable",
"Environment :: Console",
@@ -161,12 +171,11 @@
"Operating System :: POSIX",
"Operating System :: POSIX :: Linux",
"Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.6",
- "Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Topic :: Scientific/Engineering :: Astronomy",
diff --git a/test-requirements.txt b/test-requirements.txt
index e8bb881b5..0085cde01 100644
--- a/test-requirements.txt
+++ b/test-requirements.txt
@@ -1,11 +1,13 @@
-bagit >= 1.6.4, <1.9
-pytest >= 6.2, < 7.4
-pytest-xdist
+bagit>=1.6.4,<1.9
+pytest>= 6.2,< 8.2
+pytest-xdist>=3.2.0 # for the worksteal scheduler
+psutil # enhances pytest-xdist to allow "-n logical"
pytest-httpserver
-mock >= 2.0.0
-pytest-mock >= 1.10.0
+pytest-retry;python_version>'3.9'
+mock>=2.0.0
+pytest-mock>=1.10.0
pytest-cov
-arcp >= 0.2.0
-rdflib-jsonld>=0.4.0, <= 0.6.1;python_version<='3.6'
+arcp>=0.2.0
-rrequirements.txt
-galaxy-tool-util >= 22.1.2, < 23
+galaxy-tool-util>=22.1.2,!=23.0.1,!=23.0.2,!=23.0.3,!=23.0.4,!=23.0.5,<23.3
+galaxy-util<23.2
diff --git a/tests/cwl-conformance/cwltool-conftest.py b/tests/cwl-conformance/cwltool-conftest.py
index e846b2706..3e2b83990 100644
--- a/tests/cwl-conformance/cwltool-conftest.py
+++ b/tests/cwl-conformance/cwltool-conftest.py
@@ -3,6 +3,7 @@
Calls cwltool via Python, instead of a subprocess via `--cwl-runner cwltool`.
"""
+
import json
from io import StringIO
from typing import Any, Dict, List, Optional, Tuple
diff --git a/tests/loop/scatter-inside-loop.cwl b/tests/loop/scatter-inside-loop.cwl
new file mode 100644
index 000000000..7a4c394be
--- /dev/null
+++ b/tests/loop/scatter-inside-loop.cwl
@@ -0,0 +1,53 @@
+#!/usr/bin/env cwl-runner
+cwlVersion: v1.2
+class: Workflow
+$namespaces:
+ cwltool: "http://commonwl.org/cwltool#"
+requirements:
+ InlineJavascriptRequirement: {}
+ ScatterFeatureRequirement: {}
+ SubworkflowFeatureRequirement: {}
+inputs:
+ i1: int[]
+ i2: int
+outputs:
+ o1:
+ type: int[]
+ outputSource: scatter/o1
+steps:
+ scatter:
+ run:
+ class: Workflow
+ inputs:
+ i1: int[]
+ i2: int
+ outputs:
+ o1:
+ type: int[]
+ outputSource: subworkflow/o1
+ steps:
+ subworkflow:
+ run:
+ class: ExpressionTool
+ inputs:
+ i1: int
+ i2: int
+ outputs:
+ o1: int
+ expression: >
+ ${return {'o1': inputs.i1 + inputs.i2};}
+ in:
+ i1: i1
+ i2: i2
+ out: [o1]
+ scatter: i1
+ in:
+ i1: i1
+ i2: i2
+ out: [o1]
+ requirements:
+ cwltool:Loop:
+ loopWhen: $(inputs.i1[0] < 10)
+ loop:
+ i1: o1
+ outputMethod: last
\ No newline at end of file
diff --git a/tests/nested-array.cwl b/tests/nested-array.cwl
new file mode 100644
index 000000000..8272614fc
--- /dev/null
+++ b/tests/nested-array.cwl
@@ -0,0 +1,11 @@
+cwlVersion: v1.2
+class: CommandLineTool
+baseCommand: echo
+inputs:
+ letters:
+ type: string[][]
+ inputBinding:
+ position: 1
+stdout: echo.txt
+outputs:
+ echo: stdout
diff --git a/tests/reloc/dir1/foo b/tests/reloc/dir1/foo
deleted file mode 100644
index e69de29bb..000000000
diff --git a/tests/reloc/dir2 b/tests/reloc/dir2
deleted file mode 120000
index df490f837..000000000
--- a/tests/reloc/dir2
+++ /dev/null
@@ -1 +0,0 @@
-dir1
\ No newline at end of file
diff --git a/tests/secondary-files-required-container.cwl b/tests/secondary-files-required-container.cwl
new file mode 100644
index 000000000..254470b4f
--- /dev/null
+++ b/tests/secondary-files-required-container.cwl
@@ -0,0 +1,20 @@
+#!/usr/bin/env cwl-runner
+cwlVersion: v1.2
+class: CommandLineTool
+
+hints:
+ DockerRequirement:
+ dockerPull: docker.io/alpine:latest
+
+inputs: []
+
+baseCommand: [ touch, file.ext1, file.ext2 ]
+
+outputs:
+ output:
+ type: File
+ secondaryFiles:
+ - pattern: ^.ext2
+ required: true
+ outputBinding:
+ glob: file.ext1
diff --git a/tests/secondary-files-required-missing-container.cwl b/tests/secondary-files-required-missing-container.cwl
new file mode 100644
index 000000000..931658ae9
--- /dev/null
+++ b/tests/secondary-files-required-missing-container.cwl
@@ -0,0 +1,20 @@
+#!/usr/bin/env cwl-runner
+cwlVersion: v1.2
+class: CommandLineTool
+
+hints:
+ DockerRequirement:
+ dockerPull: docker.io/alpine:latest
+
+inputs: []
+
+baseCommand: [ touch, file.ext1, file.ext2 ]
+
+outputs:
+ output:
+ type: File
+ secondaryFiles:
+ - pattern: ^.ext3
+ required: true
+ outputBinding:
+ glob: file.ext1
diff --git a/tests/seqtk_seq.cwl b/tests/seqtk_seq.cwl
index 4e1029a69..212e7a666 100755
--- a/tests/seqtk_seq.cwl
+++ b/tests/seqtk_seq.cwl
@@ -22,4 +22,4 @@ hints:
packages:
- package: seqtk
version:
- - r93
+ - "1.4"
diff --git a/tests/seqtk_seq_with_docker.cwl b/tests/seqtk_seq_with_docker.cwl
index aeff27f88..996db947e 100755
--- a/tests/seqtk_seq_with_docker.cwl
+++ b/tests/seqtk_seq_with_docker.cwl
@@ -22,6 +22,6 @@ hints:
packages:
- package: seqtk
version:
- - '1.2'
+ - '1.4'
DockerRequirement:
- dockerPull: quay.io/biocontainers/seqtk:1.2--0
+ dockerPull: quay.io/biocontainers/seqtk:1.4--he4a0461_1
diff --git a/tests/test_check.py b/tests/test_check.py
index b95993197..d53d4b71b 100644
--- a/tests/test_check.py
+++ b/tests/test_check.py
@@ -1,4 +1,5 @@
"""Confirm some known-bad CWL descriptions."""
+
import pytest
from cwltool.main import main
diff --git a/tests/test_cuda.py b/tests/test_cuda.py
index e8de7cd63..27dfae39d 100644
--- a/tests/test_cuda.py
+++ b/tests/test_cuda.py
@@ -284,7 +284,7 @@ def test_cuda_job_setup_check_err_wrong_type_cuda_version(
def test_cuda_eval_resource_range() -> None:
- with open(get_data("cwltool/extensions-v1.1.yml")) as res:
+ with open(get_data("extensions-v1.1.yml")) as res:
use_custom_schema("v1.2", "http://commonwl.org/cwltool", res.read())
joborder = {} # type: CWLObjectType
@@ -301,7 +301,7 @@ def test_cuda_eval_resource_range() -> None:
def test_cuda_eval_resource_max() -> None:
- with open(get_data("cwltool/extensions-v1.1.yml")) as res:
+ with open(get_data("extensions-v1.1.yml")) as res:
use_custom_schema("v1.2", "http://commonwl.org/cwltool", res.read())
joborder = {} # type: CWLObjectType
diff --git a/tests/test_dependencies.py b/tests/test_dependencies.py
index 639f68b7b..2acbf9f48 100644
--- a/tests/test_dependencies.py
+++ b/tests/test_dependencies.py
@@ -1,4 +1,5 @@
"""Tests of satisfying SoftwareRequirement via dependencies."""
+
import os
import tempfile
from getpass import getuser
@@ -17,13 +18,14 @@
deps: Optional[ModuleType] = None
try:
- from galaxy.tool_util import deps # type: ignore[no-redef]
+ from galaxy.tool_util import deps
except ImportError:
pass
@needs_docker
@pytest.mark.skipif(not deps, reason="galaxy-tool-util is not installed")
+@pytest.mark.flaky(retries=3)
def test_biocontainers(tmp_path: Path) -> None:
wflow = get_data("tests/seqtk_seq.cwl")
job = get_data("tests/seqtk_seq_job.json")
@@ -51,7 +53,7 @@ def test_biocontainers_resolution(tmp_path: Path) -> None:
get_container_from_software_requirements(
True, tool, container_image_cache_path=str(tmp_path)
)
- == "quay.io/biocontainers/seqtk:r93--0"
+ == "quay.io/biocontainers/seqtk:1.4--he4a0461_1"
)
diff --git a/tests/test_docker.py b/tests/test_docker.py
index d3d049270..26534d541 100644
--- a/tests/test_docker.py
+++ b/tests/test_docker.py
@@ -1,11 +1,21 @@
"""Tests for docker engine."""
+
+import json
import re
from pathlib import Path
from shutil import which
+import pytest
+
from cwltool.main import main
-from .util import get_data, get_main_output, needs_docker
+from .util import (
+ get_data,
+ get_main_output,
+ needs_docker,
+ needs_podman,
+ needs_singularity,
+)
@needs_docker
@@ -136,3 +146,138 @@ def test_docker_strict_memory_limit_warning(tmp_path: Path) -> None:
stderr = re.sub(r"\s\s+", " ", stderr)
assert result_code == 0
assert "Skipping Docker software container '--memory' limit" in stderr
+
+
+@needs_docker
+def test_docker_required_secfile(tmp_path: Path) -> None:
+ result_code, stdout, stderr = get_main_output(
+ [
+ "--outdir",
+ str(tmp_path),
+ get_data("tests/secondary-files-required-container.cwl"),
+ ]
+ )
+ assert result_code == 0, stderr
+ assert (
+ json.loads(stdout)["output"]["secondaryFiles"][0]["checksum"]
+ == "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709"
+ )
+
+
+@needs_podman
+def test_podman_required_secfile(tmp_path: Path) -> None:
+ result_code, stdout, stderr = get_main_output(
+ [
+ "--podman",
+ "--outdir",
+ str(tmp_path),
+ get_data("tests/secondary-files-required-container.cwl"),
+ ]
+ )
+ assert result_code == 0, stderr
+ assert (
+ json.loads(stdout)["output"]["secondaryFiles"][0]["checksum"]
+ == "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709"
+ )
+
+
+@needs_singularity
+def test_singularity_required_secfile(tmp_path: Path, monkeypatch: pytest.MonkeyPatch) -> None:
+ singularity_dir = tmp_path / "singularity"
+ singularity_dir.mkdir()
+ monkeypatch.setenv("CWL_SINGULARITY_CACHE", str(singularity_dir))
+
+ result_code, stdout, stderr = get_main_output(
+ [
+ "--singularity",
+ "--outdir",
+ str(tmp_path / "out"),
+ get_data("tests/secondary-files-required-container.cwl"),
+ ]
+ )
+ assert result_code == 0, stderr
+ assert (
+ json.loads(stdout)["output"]["secondaryFiles"][0]["checksum"]
+ == "sha1$da39a3ee5e6b4b0d3255bfef95601890afd80709"
+ )
+
+
+@needs_docker
+def test_docker_required_missing_secfile(tmp_path: Path) -> None:
+ result_code, stdout, stderr = get_main_output(
+ [
+ "--outdir",
+ str(tmp_path),
+ get_data("tests/secondary-files-required-missing-container.cwl"),
+ ]
+ )
+ assert result_code == 1, stderr
+ stderr = re.sub(r"\s\s+", " ", stderr)
+ assert "Job error:" in stderr
+ assert "Error collecting output for parameter 'output'" in stderr
+    assert (
+        "tests/secondary-files-required-missing-container.cwl:16:5: Missing required secondary file"
+    ) in stderr
+ assert "file.ext3" in stderr
+
+
+@needs_podman
+def test_podman_required_missing_secfile(tmp_path: Path) -> None:
+ result_code, stdout, stderr = get_main_output(
+ [
+ "--podman",
+ "--outdir",
+ str(tmp_path),
+ get_data("tests/secondary-files-required-missing-container.cwl"),
+ ]
+ )
+ assert result_code == 1, stderr
+ stderr = re.sub(r"\s\s+", " ", stderr)
+ assert "Job error:" in stderr
+ assert "Error collecting output for parameter 'output'" in stderr
+    assert (
+        "tests/secondary-files-required-missing-container.cwl:16:5: Missing required secondary file"
+    ) in stderr
+ assert "file.ext3" in stderr
+
+
+@needs_singularity
+def test_singularity_required_missing_secfile(
+ tmp_path: Path, monkeypatch: pytest.MonkeyPatch
+) -> None:
+ singularity_dir = tmp_path / "singularity"
+ singularity_dir.mkdir()
+ monkeypatch.setenv("CWL_SINGULARITY_CACHE", str(singularity_dir))
+ result_code, stdout, stderr = get_main_output(
+ [
+ "--singularity",
+ "--outdir",
+ str(tmp_path),
+ get_data("tests/secondary-files-required-missing-container.cwl"),
+ ]
+ )
+ assert result_code == 1, stderr
+ stderr = re.sub(r"\s\s+", " ", stderr)
+ assert "Job error:" in stderr
+ assert "Error collecting output for parameter 'output'" in stderr
+    assert (
+        "tests/secondary-files-required-missing-container.cwl:16:5: Missing required secondary file"
+    ) in stderr
+ assert "file.ext3" in stderr
+
+
+@needs_docker
+def test_docker_shm_size(tmp_path: Path) -> None:
+ result_code, stdout, stderr = get_main_output(
+ [
+ "--enable-ext",
+ "--default-container",
+ "docker.io/debian:stable-slim",
+ "--outdir",
+ str(tmp_path),
+ get_data("tests/wf/shm_size.cwl"),
+ ]
+ )
+ stderr = re.sub(r"\s\s+", " ", stderr)
+ assert result_code == 0
+ assert "--shm-size=128m" in stderr
diff --git a/tests/test_environment.py b/tests/test_environment.py
index 0fad65d7f..ba87041b3 100644
--- a/tests/test_environment.py
+++ b/tests/test_environment.py
@@ -1,4 +1,5 @@
"""Test passing of environment variables to tools."""
+
import os
from abc import ABC, abstractmethod
from pathlib import Path
@@ -28,7 +29,7 @@ def assert_envvar_matches(check: CheckerTypes, k: str, env: Mapping[str, str]) -
if isinstance(check, str):
assert v == check, f"Environment variable {k} == {v!r} != {check!r}"
else:
- assert check(v, env), f"Environment variable {k}={v!r} fails check"
+ assert check(v, env), f"Environment variable {k}={v!r} fails check."
def assert_env_matches(
@@ -149,11 +150,14 @@ def PWD(v: str, env: Env) -> bool:
elif vminor > 5:
sing_vars["SINGULARITY_COMMAND"] = "exec"
if vminor >= 7:
+ if vminor > 9:
+ sing_vars["SINGULARITY_BIND"] = ""
+ else:
- def BIND(v: str, env: Env) -> bool:
- return v.startswith(tmp_prefix) and v.endswith(":/tmp")
+ def BIND(v: str, env: Env) -> bool:
+ return v.startswith(tmp_prefix) and v.endswith(":/tmp")
- sing_vars["SINGULARITY_BIND"] = BIND
+ sing_vars["SINGULARITY_BIND"] = BIND
result.update(sing_vars)
diff --git a/tests/test_examples.py b/tests/test_examples.py
index 9fa800af6..4d479e313 100644
--- a/tests/test_examples.py
+++ b/tests/test_examples.py
@@ -6,6 +6,7 @@
import stat
import subprocess
import sys
+import urllib.parse
from io import StringIO
from pathlib import Path
from typing import Any, Dict, List, Union, cast
@@ -1324,6 +1325,28 @@ def test_cache_relative_paths(tmp_path: Path, factor: str) -> None:
assert (tmp_path / "cwltool_cache" / "27903451fc1ee10c148a0bdeb845b2cf").exists()
+@pytest.mark.parametrize("factor", test_factors)
+def test_cache_default_literal_file(tmp_path: Path, factor: str) -> None:
+ """Confirm that running a CLT with a default literal file with caching succeeds."""
+ test_file = "tests/wf/extract_region_specs.cwl"
+ cache_dir = str(tmp_path / "cwltool_cache")
+ commands = factor.split()
+ commands.extend(
+ [
+ "--out",
+ str(tmp_path / "out"),
+ "--cachedir",
+ cache_dir,
+ get_data(test_file),
+ ]
+ )
+ error_code, _, stderr = get_main_output(commands)
+
+ stderr = re.sub(r"\s\s+", " ", stderr)
+ assert "completed success" in stderr
+ assert error_code == 0
+
+
def test_write_summary(tmp_path: Path) -> None:
"""Test --write-summary."""
commands = [
@@ -1473,7 +1496,9 @@ def test_bad_userspace_runtime(factor: str) -> None:
)
error_code, stdout, stderr = get_main_output(commands)
stderr = re.sub(r"\s\s+", " ", stderr)
- assert "or quaquioN is missing or broken" in stderr, stderr
+ assert ("or quaquioN is missing or broken" in stderr) or (
+ "No such file or directory: 'quaquioN'" in stderr
+ ), stderr
assert error_code == 1
@@ -1717,7 +1742,7 @@ def test_expression_tool_class() -> None:
factory = cwltool.factory.Factory()
tool_path = get_data("tests/wf/parseInt-tool.cwl")
expression_tool = factory.make(tool_path).t
- assert str(expression_tool) == f"ExpressionTool: file://{tool_path}"
+ assert urllib.parse.unquote(str(expression_tool)) == f"ExpressionTool: file://{tool_path}"
def test_operation_class() -> None:
@@ -1725,7 +1750,7 @@ def test_operation_class() -> None:
factory = cwltool.factory.Factory()
tool_path = get_data("tests/wf/operation/abstract-cosifer.cwl")
expression_tool = factory.make(tool_path).t
- assert str(expression_tool) == f"AbstractOperation: file://{tool_path}"
+ assert urllib.parse.unquote(str(expression_tool)) == f"AbstractOperation: file://{tool_path}"
def test_command_line_tool_class() -> None:
@@ -1733,7 +1758,7 @@ def test_command_line_tool_class() -> None:
factory = cwltool.factory.Factory()
tool_path = get_data("tests/echo.cwl")
expression_tool = factory.make(tool_path).t
- assert str(expression_tool) == f"CommandLineTool: file://{tool_path}"
+ assert urllib.parse.unquote(str(expression_tool)) == f"CommandLineTool: file://{tool_path}"
def test_record_default_with_long(tmp_path: Path) -> None:
@@ -1839,3 +1864,33 @@ def test_very_small_and_large_floats() -> None:
)
assert exit_code == 0, stderr
assert json.loads(stdout)["result"] == "0.00001 0.0000123 123000 1230000"
+
+
+def test_invalid_nested_array() -> None:
+ """Test feature proposed for CWL v1.3 in a CWL v1.2 document."""
+ exit_code, stdout, stderr = get_main_output(
+ [
+ "--validate",
+ get_data("tests/nested-array.cwl"),
+ ]
+ )
+ assert exit_code == 1, stderr
+ stderr = re.sub(r"\n\s+", " ", stderr)
+ stderr = re.sub(r"\s\s+", " ", stderr)
+ assert "Tool definition failed validation:" in stderr
+ assert (
+ "tests/nested-array.cwl:6:5: Field 'type' references unknown identifier 'string[][]'"
+ ) in stderr
+
+
+def test_input_named_id() -> None:
+ """Confirm that it is valid to have an input named "id"."""
+ exit_code, stdout, stderr = get_main_output(
+ [
+ "--validate",
+ "--debug",
+ get_data("tests/wf/input_named_id.cwl"),
+ get_data("tests/wf/input_named_id.yaml"),
+ ]
+ )
+ assert exit_code == 0, stderr
diff --git a/tests/test_ext.py b/tests/test_ext.py
index ffda59c5e..0d2665ca1 100644
--- a/tests/test_ext.py
+++ b/tests/test_ext.py
@@ -8,13 +8,16 @@
import cwltool.process
from cwltool.main import main
-from .util import get_data, needs_docker
+from .util import get_data, get_main_output, needs_docker
@needs_docker
def test_missing_enable_ext() -> None:
- # Require that --enable-ext is provided.
- assert main([get_data("tests/wf/listing_deep.cwl"), get_data("tests/listing-job.yml")]) != 0
+ """Require that --enable-ext is provided."""
+ error_code, _, _ = get_main_output(
+ [get_data("tests/wf/listing_deep.cwl"), get_data("tests/listing-job.yml")]
+ )
+ assert error_code != 0
@needs_docker
@@ -67,13 +70,6 @@ def test_listing_v1_0() -> None:
assert main([get_data("tests/wf/listing_v1_0.cwl"), get_data("tests/listing-job.yml")]) == 0
-@pytest.mark.skip(reason="This is not the default behaviour yet")
-@needs_docker
-def test_listing_v1_1() -> None:
- # Default behavior in 1.1 will be no expansion
- assert main([get_data("tests/wf/listing_v1_1.cwl"), get_data("tests/listing-job.yml")]) != 0
-
-
@needs_docker
def test_double_overwrite(tmp_path: Path) -> None:
"""Test that overwriting an input using cwltool:InplaceUpdateRequirement works."""
@@ -278,3 +274,14 @@ def test_warn_large_inputs() -> None:
)
finally:
cwltool.process.FILE_COUNT_WARNING = was
+
+
+def test_ext_validation_no_namespace_warning() -> None:
+ error_code, stdout, stderr = get_main_output(
+ ["--validate", "--enable-ext", get_data("tests/wf/mpi_env.cwl")]
+ )
+ assert error_code == 0
+ assert (
+ "URI prefix 'cwltool' of 'cwltool:loop' not recognized, are you "
+ "missing a $namespaces section?"
+ ) not in stderr
diff --git a/tests/test_iwdr.py b/tests/test_iwdr.py
index 5a370ae55..8d4138cb7 100644
--- a/tests/test_iwdr.py
+++ b/tests/test_iwdr.py
@@ -1,4 +1,5 @@
"""InitialWorkDirRequirement related tests."""
+
import json
import re
from pathlib import Path
diff --git a/tests/test_js_sandbox.py b/tests/test_js_sandbox.py
index 9739c77a7..f4839e8a0 100644
--- a/tests/test_js_sandbox.py
+++ b/tests/test_js_sandbox.py
@@ -1,4 +1,5 @@
"""Test sandboxjs.py and related code."""
+
import logging
import os
import shutil
@@ -22,7 +23,7 @@
("v7.7.3\n", True),
]
-configure_logging(_logger.handlers[-1], False, True, True, True)
+configure_logging(_logger.handlers[-1], False, False, True, True, True)
_logger.setLevel(logging.DEBUG)
@@ -64,7 +65,7 @@ def hide_nodejs(temp_dir: Path) -> str:
os.symlink(os.path.join(dirname, entry), new_dir / entry)
paths.append(str(new_dir))
dirname_path = Path(dirname)
- for path in paths:
+ for path in list(paths):
if Path(path).resolve() == dirname_path:
paths.remove(path)
return ":".join(paths)
diff --git a/tests/test_load_tool.py b/tests/test_load_tool.py
index 3d0cba161..2c504eafd 100644
--- a/tests/test_load_tool.py
+++ b/tests/test_load_tool.py
@@ -1,5 +1,7 @@
"""Tests for cwltool.load_tool."""
+
import logging
+import urllib.parse
from pathlib import Path
import pytest
@@ -15,7 +17,7 @@
from .util import get_data
-configure_logging(_logger.handlers[-1], False, True, True, True)
+configure_logging(_logger.handlers[-1], False, False, True, True, True)
_logger.setLevel(logging.DEBUG)
@@ -86,7 +88,7 @@ def test_load_graph_fragment_from_packed() -> None:
loadingContext = LoadingContext()
uri = Path(get_data("tests/wf/packed-with-loadlisting.cwl")).as_uri() + "#main"
try:
- with open(get_data("cwltool/extensions.yml")) as res:
+ with open(get_data("extensions.yml")) as res:
use_custom_schema("v1.0", "http://commonwl.org/cwltool", res.read())
# The updater transforms LoadListingRequirement from an
@@ -133,17 +135,17 @@ def test_import_tracked() -> None:
loadingContext = LoadingContext({"fast_parser": True})
tool = load_tool(get_data("tests/wf/811-12.cwl"), loadingContext)
- path = "import:file://%s" % get_data("tests/wf/schemadef-type.yml")
+ path = f"import:file://{get_data('tests/wf/schemadef-type.yml')}"
+ path2 = f"import:file://{urllib.parse.quote(get_data('tests/wf/schemadef-type.yml'))}"
assert tool.doc_loader is not None
- assert path in tool.doc_loader.idx
+ assert path in tool.doc_loader.idx or path2 in tool.doc_loader.idx
loadingContext = LoadingContext({"fast_parser": False})
tool = load_tool(get_data("tests/wf/811.cwl"), loadingContext)
- path = "import:file://%s" % get_data("tests/wf/schemadef-type.yml")
assert tool.doc_loader is not None
- assert path in tool.doc_loader.idx
+ assert path in tool.doc_loader.idx or path2 in tool.doc_loader.idx
def test_load_badhints() -> None:
diff --git a/tests/test_loop.py b/tests/test_loop.py
index 4d82e7e91..ab1d9b0f6 100644
--- a/tests/test_loop.py
+++ b/tests/test_loop.py
@@ -1,4 +1,5 @@
"""Test the prototype loop extension."""
+
import json
from io import StringIO
from typing import MutableMapping, MutableSequence
@@ -221,6 +222,19 @@ def test_loop_inside_scatter() -> None:
assert json.loads(stream.getvalue()) == expected
+def test_scatter_inside_loop() -> None:
+ """Test a loop workflow with inside a scatter step."""
+ stream = StringIO()
+ params = [
+ "--enable-ext",
+ get_data("tests/loop/scatter-inside-loop.cwl"),
+ get_data("tests/loop/loop-inside-scatter-job.yml"),
+ ]
+ main(params, stdout=stream)
+ expected = {"o1": [10, 11, 12, 13, 14]}
+ assert json.loads(stream.getvalue()) == expected
+
+
def test_nested_loops() -> None:
"""Test a workflow with two nested loops."""
stream = StringIO()
diff --git a/tests/test_main_parsed_args.py b/tests/test_main_parsed_args.py
new file mode 100644
index 000000000..7a94d9225
--- /dev/null
+++ b/tests/test_main_parsed_args.py
@@ -0,0 +1,40 @@
+import io
+from pathlib import Path
+
+from cwltool.argparser import arg_parser
+from cwltool.main import main
+
+from .util import get_data
+
+
+def test_main_parsed_args(tmp_path: Path) -> None:
+ """Affirm that main can be called with parsed args only."""
+ stdout = io.StringIO()
+ stderr = io.StringIO()
+
+ unparsed_args = [get_data("tests/echo.cwl"), "--inp", "Hello"]
+ parsed_args = arg_parser().parse_args(unparsed_args)
+
+ try:
+ assert main(args=parsed_args, stdout=stdout, stderr=stderr) == 0
+ except SystemExit as err:
+ assert err.code == 0
+
+
+def test_main_parsed_args_provenance(tmp_path: Path) -> None:
+ """Affirm that main can be called with parsed args only, requesting provenance."""
+ stdout = io.StringIO()
+ stderr = io.StringIO()
+
+ prov_folder = tmp_path / "provenance" # will be created if necessary
+
+ unparsed_args = ["--provenance", str(prov_folder), get_data("tests/echo.cwl"), "--inp", "Hello"]
+ parsed_args = arg_parser().parse_args(unparsed_args)
+
+ try:
+ assert main(args=parsed_args, stdout=stdout, stderr=stderr) == 0
+ except SystemExit as err:
+ assert err.code == 0
+
+ manifest_file = prov_folder / "metadata" / "manifest.json"
+ assert manifest_file.is_file(), f"Can't find RO-Crate manifest {manifest_file}"
diff --git a/tests/test_mpi.py b/tests/test_mpi.py
index 3e3b5d491..643907a39 100644
--- a/tests/test_mpi.py
+++ b/tests/test_mpi.py
@@ -1,4 +1,5 @@
"""Tests of the experimental MPI extension."""
+
import json
import os.path
import sys
@@ -6,7 +7,6 @@
from pathlib import Path
from typing import Any, Generator, List, MutableMapping, Optional, Tuple
-import pkg_resources
import pytest
from ruamel.yaml.comments import CommentedMap, CommentedSeq
from schema_salad.avro.schema import Names
@@ -19,6 +19,7 @@
from cwltool.context import LoadingContext, RuntimeContext
from cwltool.main import main
from cwltool.mpi import MpiConfig, MPIRequirementName
+from cwltool.utils import files
from .util import get_data, working_directory
@@ -281,12 +282,11 @@ def test_env_passing(monkeypatch: pytest.MonkeyPatch) -> None:
# Reading the schema is super slow - cache for the session
@pytest.fixture(scope="session")
def schema_ext11() -> Generator[Names, None, None]:
- with pkg_resources.resource_stream("cwltool", "extensions-v1.1.yml") as res:
- ext11 = res.read().decode("utf-8")
- cwltool.process.use_custom_schema("v1.1", "http://commonwl.org/cwltool", ext11)
- schema = cwltool.process.get_schema("v1.1")[1]
- assert isinstance(schema, Names)
- yield schema
+ ext11 = files("cwltool").joinpath("extensions-v1.1.yml").read_text("utf-8")
+ cwltool.process.use_custom_schema("v1.1", "http://commonwl.org/cwltool", ext11)
+ schema = cwltool.process.get_schema("v1.1")[1]
+ assert isinstance(schema, Names)
+ yield schema
mpiReq = CommentedMap({"class": MPIRequirementName, "processes": 1})
diff --git a/tests/test_path_checks.py b/tests/test_path_checks.py
index 2ebda7fe3..018a92120 100644
--- a/tests/test_path_checks.py
+++ b/tests/test_path_checks.py
@@ -107,7 +107,7 @@ def test_unicode_in_output_files(tmp_path: Path, filename: str) -> None:
assert main(params) == 0
-class TestFsAccess(StdFsAccess):
+class StubFsAccess(StdFsAccess):
"""Stub fs access object that doesn't rely on the filesystem."""
def glob(self, pattern: str) -> List[str]:
@@ -195,7 +195,7 @@ def test_clt_returns_specialchar_names(tmp_path: Path) -> None:
builder.files, builder.stagedir, RuntimeContext(), True
)
builder.outdir = "/var/spool/cwl"
- fs_access = TestFsAccess("")
+ fs_access = StubFsAccess("")
result = cast(
CWLObjectType,
diff --git a/tests/test_recursive_validation.py b/tests/test_recursive_validation.py
index 300c99d88..2794a0094 100644
--- a/tests/test_recursive_validation.py
+++ b/tests/test_recursive_validation.py
@@ -1,4 +1,5 @@
"""Test the recursive validation feature (validate document and try to build tool)."""
+
from cwltool.load_tool import fetch_document, recursive_resolve_and_validate_document
from .util import get_data
diff --git a/tests/test_relocate.py b/tests/test_relocate.py
index e3c7e59c8..81877c776 100644
--- a/tests/test_relocate.py
+++ b/tests/test_relocate.py
@@ -1,4 +1,6 @@
import json
+import os
+import shutil
import sys
from pathlib import Path
@@ -56,15 +58,19 @@ def test_for_conflict_file_names_nodocker(tmp_path: Path) -> None:
def test_relocate_symlinks(tmp_path: Path) -> None:
+ shutil.copy(get_data("tests/reloc/test.cwl"), tmp_path)
+ (tmp_path / "dir1").mkdir()
+ (tmp_path / "dir1" / "foo").touch()
+ os.symlink(tmp_path / "dir1", tmp_path / "dir2")
assert (
main(
[
"--debug",
"--outdir",
- get_data("tests/reloc") + "/dir2",
- get_data("tests/reloc/test.cwl"),
+ str(tmp_path / "dir2"),
+ str(tmp_path / "test.cwl"),
"--inp",
- get_data("tests/reloc") + "/dir2",
+ str(tmp_path / "dir2"),
]
)
== 0
diff --git a/tests/test_singularity.py b/tests/test_singularity.py
index 605f22028..0512f2e28 100644
--- a/tests/test_singularity.py
+++ b/tests/test_singularity.py
@@ -1,4 +1,5 @@
"""Tests to find local Singularity image."""
+
import shutil
from pathlib import Path
from typing import Any
diff --git a/tests/test_singularity_versions.py b/tests/test_singularity_versions.py
index 3f79e8266..f7b30d8a3 100644
--- a/tests/test_singularity_versions.py
+++ b/tests/test_singularity_versions.py
@@ -1,4 +1,5 @@
"""Test singularity{,-ce} & apptainer versions."""
+
from subprocess import check_output # nosec
import cwltool.singularity
@@ -21,7 +22,7 @@ def reset_singularity_version_cache() -> None:
def set_dummy_check_output(name: str, version: str) -> None:
"""Mock out subprocess.check_output."""
cwltool.singularity.check_output = ( # type: ignore[attr-defined]
- lambda c, universal_newlines: name + " version " + version
+ lambda c, text: name + " version " + version # type: ignore[assignment]
)
diff --git a/tests/test_streaming.py b/tests/test_streaming.py
index 3c5526592..83ad81e0d 100644
--- a/tests/test_streaming.py
+++ b/tests/test_streaming.py
@@ -1,4 +1,5 @@
"""Test that files marked as 'streamable' when 'streaming_allowed' can be named pipes."""
+
import os
from pathlib import Path
from typing import cast
diff --git a/tests/test_subclass_mypyc.py b/tests/test_subclass_mypyc.py
index 9f302fead..aa4964b34 100644
--- a/tests/test_subclass_mypyc.py
+++ b/tests/test_subclass_mypyc.py
@@ -15,6 +15,7 @@
from cwltool.context import LoadingContext, RuntimeContext
from cwltool.stdfsaccess import StdFsAccess
from cwltool.update import INTERNAL_VERSION
+from cwltool.workflow import Workflow
from .test_anon_types import snippet
@@ -41,6 +42,16 @@ class TestExprTool(ExpressionTool):
assert a.test is False
+@pytest.mark.parametrize("snippet", snippet)
+def test_pickle_unpickle_workflow(snippet: CommentedMap) -> None:
+ """We can pickle & unpickle a Workflow."""
+
+ a = Workflow(snippet, LoadingContext())
+ stream = pickle.dumps(a)
+ assert stream
+ assert pickle.loads(stream)
+
+
def test_serialize_builder() -> None:
"""We can pickle Builder."""
runtime_context = RuntimeContext()
@@ -70,3 +81,12 @@ def test_serialize_builder() -> None:
"docker",
)
assert pickle.dumps(builder)
+
+
+def test_pickle_unpickle_runtime_context() -> None:
+ """We can pickle & unpickle RuntimeContext"""
+
+ runtime_context = RuntimeContext()
+ stream = pickle.dumps(runtime_context)
+ assert stream
+ assert pickle.loads(stream)
diff --git a/tests/test_tmpdir.py b/tests/test_tmpdir.py
index 14caecb3c..af830834b 100644
--- a/tests/test_tmpdir.py
+++ b/tests/test_tmpdir.py
@@ -1,5 +1,8 @@
"""Test that all temporary directories respect the --tmpdir-prefix and --tmp-outdir-prefix options."""
+
+import os
import re
+import shutil
import subprocess
import sys
from pathlib import Path
@@ -17,11 +20,12 @@
from cwltool.job import JobBase
from cwltool.main import main
from cwltool.pathmapper import MapperEnt
+from cwltool.singularity import SingularityCommandLineJob
from cwltool.stdfsaccess import StdFsAccess
from cwltool.update import INTERNAL_VERSION, ORIGINAL_CWLVERSION
from cwltool.utils import create_tmp_dir
-from .util import get_data, get_main_output, needs_docker
+from .util import get_data, get_main_output, needs_docker, needs_singularity
def test_docker_commandLineTool_job_tmpdir_prefix(tmp_path: Path) -> None:
@@ -164,6 +168,65 @@ def test_dockerfile_tmpdir_prefix(tmp_path: Path, monkeypatch: pytest.MonkeyPatc
assert (subdir / "Dockerfile").exists()
+@needs_singularity
+def test_dockerfile_singularity_build(monkeypatch: pytest.MonkeyPatch, tmp_path: Path) -> None:
+ """Test that SingularityCommandLineJob.get_image builds a Dockerfile with Singularity."""
+ tmppath = Path(os.environ.get("APPTAINER_TMPDIR", tmp_path))
+ # some HPC not allowed to execute on /tmp so allow user to define temp path with APPTAINER_TMPDIR
+ # FATAL: Unable to create build: 'noexec' mount option set on /tmp, temporary root filesystem
+ monkeypatch.setattr(target=subprocess, name="check_call", value=lambda *args, **kwargs: True)
+ (tmppath / "out").mkdir(exist_ok=True)
+ tmp_outdir_prefix = tmppath / "out" / "1"
+ (tmppath / "3").mkdir(exist_ok=True)
+ tmpdir_prefix = str(tmppath / "3" / "ttmp")
+ runtime_context = RuntimeContext(
+ {"tmpdir_prefix": tmpdir_prefix, "user_space_docker_cmd": None}
+ )
+ builder = Builder(
+ {},
+ [],
+ [],
+ {},
+ schema.Names(),
+ [],
+ [],
+ {},
+ None,
+ None,
+ StdFsAccess,
+ StdFsAccess(""),
+ None,
+ 0.1,
+ True,
+ False,
+ False,
+ "no_listing",
+ runtime_context.get_outdir(),
+ runtime_context.get_tmpdir(),
+ runtime_context.get_stagedir(),
+ INTERNAL_VERSION,
+ "singularity",
+ )
+
+ assert SingularityCommandLineJob(
+ builder, {}, CommandLineTool.make_path_mapper, [], [], ""
+ ).get_image(
+ {
+ "class": "DockerRequirement",
+ "dockerFile": "FROM debian:stable-slim",
+ },
+ pull_image=True,
+ tmp_outdir_prefix=str(tmp_outdir_prefix),
+ force_pull=True,
+ )
+ children = sorted(tmp_outdir_prefix.parent.glob("*"))
+ subdir = tmppath / children[0]
+ children = sorted(subdir.glob("*.sif"))
+ image_path = children[0]
+ assert image_path.exists()
+ shutil.rmtree(subdir)
+
+
def test_docker_tmpdir_prefix(tmp_path: Path) -> None:
"""Test that DockerCommandLineJob respects temp directory directives."""
(tmp_path / "3").mkdir()
diff --git a/tests/test_toolargparse.py b/tests/test_toolargparse.py
index ad6626385..756c373e8 100644
--- a/tests/test_toolargparse.py
+++ b/tests/test_toolargparse.py
@@ -180,7 +180,7 @@ def test_dont_require_inputs(tmp_path: Path) -> None:
def test_argparser_with_doc() -> None:
- """The `desription` field is set if `doc` field is provided."""
+ """The `description` field is set if `doc` field is provided."""
loadingContext = LoadingContext()
tool = load_tool(get_data("tests/with_doc.cwl"), loadingContext)
p = argparse.ArgumentParser()
@@ -189,7 +189,7 @@ def test_argparser_with_doc() -> None:
def test_argparser_without_doc() -> None:
- """The `desription` field is None if `doc` field is not provided."""
+ """The `description` field is None if `doc` field is not provided."""
loadingContext = LoadingContext()
tool = load_tool(get_data("tests/without_doc.cwl"), loadingContext)
p = argparse.ArgumentParser()
diff --git a/tests/test_udocker.py b/tests/test_udocker.py
index eb9a0ebfe..5a8b96304 100644
--- a/tests/test_udocker.py
+++ b/tests/test_udocker.py
@@ -1,4 +1,5 @@
"""Test optional udocker feature."""
+
import copy
import os
import subprocess
@@ -11,6 +12,7 @@
from .util import get_data, get_main_output, working_directory
LINUX = sys.platform in ("linux", "linux2")
+UDOCKER_VERSION = "1.3.12"
@pytest.fixture(scope="session")
@@ -19,10 +21,13 @@ def udocker(tmp_path_factory: TempPathFactory) -> str:
test_environ = copy.copy(os.environ)
docker_install_dir = str(tmp_path_factory.mktemp("udocker"))
with working_directory(docker_install_dir):
- url = "https://github.com/indigo-dc/udocker/releases/download/1.3.5/udocker-1.3.5.tar.gz"
+ url = (
+ "https://github.com/indigo-dc/udocker/releases/download/"
+ f"{UDOCKER_VERSION}/udocker-{UDOCKER_VERSION}.tar.gz"
+ )
install_cmds = [
["curl", "-L", url, "-o", "./udocker-tarball.tgz"],
- ["tar", "xzvf", "udocker-tarball.tgz"],
+ ["tar", "--strip-components=1", "-xzvf", "udocker-tarball.tgz"],
["./udocker/udocker", "install"],
]
diff --git a/tests/test_user_agent.py b/tests/test_user_agent.py
new file mode 100644
index 000000000..0554b558f
--- /dev/null
+++ b/tests/test_user_agent.py
@@ -0,0 +1,24 @@
+import requests
+
+from cwltool.main import append_word_to_default_user_agent, main
+
+
+def get_user_agent() -> str:
+ return requests.utils.default_headers()["User-Agent"]
+
+
+def test_cwltool_in_user_agent() -> None:
+ """python-requests HTTP User-Agent should include the string 'cwltool'."""
+ try:
+ assert main(["--version"]) == 0
+ except SystemExit as err:
+ assert err.code == 0
+ assert "cwltool" in get_user_agent()
+
+
+def test_append_word_to_default_user_agent() -> None:
+ """Confirm that append_word_to_default_user_agent works."""
+ word_to_append = "foobar123"
+ assert word_to_append not in get_user_agent()
+ append_word_to_default_user_agent(word_to_append)
+ assert word_to_append in get_user_agent()
diff --git a/tests/test_validate.py b/tests/test_validate.py
index e809df386..171a6b6c1 100644
--- a/tests/test_validate.py
+++ b/tests/test_validate.py
@@ -1,5 +1,6 @@
"""Tests --validation."""
+import re
from .util import get_data, get_main_output
@@ -14,3 +15,41 @@ def test_validate_graph_with_no_default() -> None:
assert "packed_no_main.cwl#cat is valid CWL" in stdout
assert "packed_no_main.cwl#collision is valid CWL" in stdout
assert "tests/wf/packed_no_main.cwl is valid CWL" in stdout
+
+
+def test_validate_with_valid_input_object() -> None:
+ """Ensure that --validate with a valid input object."""
+ exit_code, stdout, stderr = get_main_output(
+ [
+ "--validate",
+ get_data("tests/wf/1st-workflow.cwl"),
+ "--inp",
+ get_data("tests/wf/1st-workflow.cwl"),
+ "--ex",
+ "FOO",
+ ]
+ )
+ assert exit_code == 0
+ assert "tests/wf/1st-workflow.cwl is valid CWL. No errors detected in the inputs." in stdout
+
+
+def test_validate_with_invalid_input_object() -> None:
+ """Ensure that --validate with an invalid input object."""
+ exit_code, stdout, stderr = get_main_output(
+ [
+ "--validate",
+ get_data("tests/wf/1st-workflow.cwl"),
+ get_data("tests/wf/1st-workflow_bad_inputs.yml"),
+ ]
+ )
+ assert exit_code == 1
+ stderr = re.sub(r"\s\s+", " ", stderr)
+ assert "Invalid job input record" in stderr
+ assert (
+ "tests/wf/1st-workflow_bad_inputs.yml:2:1: * the 'ex' field is not "
+ "valid because the value is not string" in stderr
+ )
+ assert (
+ "tests/wf/1st-workflow_bad_inputs.yml:1:1: * the 'inp' field is not "
+ "valid because is not a dict. Expected a File object." in stderr
+ )
diff --git a/tests/util.py b/tests/util.py
index e5e128936..0547cfa9a 100644
--- a/tests/util.py
+++ b/tests/util.py
@@ -1,3 +1,6 @@
+"""Test functions."""
+
+import atexit
import contextlib
import io
import json
@@ -5,15 +8,16 @@
import shutil
import subprocess
import sys
+from contextlib import ExitStack
from pathlib import Path
from typing import Dict, Generator, List, Mapping, Optional, Tuple, Union
import pytest
-from pkg_resources import Requirement, ResolutionError, resource_filename
from cwltool.env_to_stdout import deserialize_env
from cwltool.main import main
from cwltool.singularity import is_version_2_6, is_version_3_or_newer
+from cwltool.utils import as_file, files
def force_default_container(default_container_id: str, _: str) -> str:
@@ -25,12 +29,15 @@ def get_data(filename: str) -> str:
filename = os.path.normpath(filename)
filepath = None
try:
- filepath = resource_filename(Requirement.parse("cwltool"), filename)
- except ResolutionError:
+ file_manager = ExitStack()
+ atexit.register(file_manager.close)
+ traversable = files("cwltool") / filename
+ filepath = file_manager.enter_context(as_file(traversable))
+ except ModuleNotFoundError:
pass
if not filepath or not os.path.isfile(filepath):
- filepath = os.path.join(os.path.dirname(__file__), os.pardir, filename)
- return str(Path(filepath).resolve())
+ filepath = Path(os.path.dirname(__file__)) / ".." / filename
+ return str(filepath.resolve())
needs_docker = pytest.mark.skipif(
@@ -67,8 +74,7 @@ def env_accepts_null() -> bool:
if _env_accepts_null is None:
result = subprocess.run(
["env", "-0"],
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
+ capture_output=True,
encoding="utf-8",
)
_env_accepts_null = result.returncode == 0
@@ -147,13 +153,13 @@ def get_tool_env(
args.append(inputs_file)
with working_directory(tmp_path):
- rc, stdout, _ = get_main_output(
+ rc, stdout, stderr = get_main_output(
args,
replacement_env=replacement_env,
extra_env=extra_env,
monkeypatch=monkeypatch,
)
- assert rc == 0
+ assert rc == 0, stdout + "\n" + stderr
output = json.loads(stdout)
with open(output["env"]["path"]) as _:
diff --git a/tests/wf/1st-workflow_bad_inputs.yml b/tests/wf/1st-workflow_bad_inputs.yml
new file mode 100644
index 000000000..d95783be2
--- /dev/null
+++ b/tests/wf/1st-workflow_bad_inputs.yml
@@ -0,0 +1,4 @@
+inp: 42
+ex:
+ class: File
+ path: 1st-workflow.cwl
diff --git a/tests/wf/echo.cwl b/tests/wf/echo.cwl
index 9c633142e..65b451fb2 100755
--- a/tests/wf/echo.cwl
+++ b/tests/wf/echo.cwl
@@ -21,4 +21,4 @@ outputs:
type: File
outputBinding:
glob: foo$(inputs.r).txt
-arguments: [python, -c, $(inputs.script), $(inputs.r)]
+arguments: [python3, -c, $(inputs.script), $(inputs.r)]
diff --git a/tests/wf/extract_region_specs.cwl b/tests/wf/extract_region_specs.cwl
new file mode 100644
index 000000000..279fa4400
--- /dev/null
+++ b/tests/wf/extract_region_specs.cwl
@@ -0,0 +1,21 @@
+{
+"cwlVersion": "v1.0",
+"class": "CommandLineTool",
+"inputs": [
+ {
+ "type": "File",
+ "default": {
+ "class": "File",
+ "basename": "extract_regions.py",
+ "contents": "#!/usr/bin/env python3\n\nfrom __future__ import print_function, division\nimport sys\n\ninput_filename = sys.argv[1]\nif len(sys.argv) == 3:\n fuzz = int(sys.argv[2])\nelse:\n fuzz = 0\ninput_file = open(input_filename)\n\ncount = 0\nfor line in input_file:\n if not line.startswith(\">\"):\n continue\n count += 1\n contig_regions_file = open(\"contig_regions{}.txt\".format(count), \"w\")\n proteins_list_file = open(\"proteins{}.txt\".format(count), \"w\")\n fields = line.split(\"|\")\n protein_id = fields[0][1:]\n contig_id = fields[1]\n r_start = int(fields[6])\n if r_start > fuzz:\n r_start = r_start - fuzz\n r_end = int(fields[7]) + fuzz\n print(\"{}:{}-{}\".format(contig_id, r_start, r_end), file=contig_regions_file)\n print(protein_id, file=proteins_list_file)\n contig_regions_file.close()\n proteins_list_file.close()\n"
+ },
+ "inputBinding": {
+ "position": 1
+ },
+ "id": "scripts"
+ }
+],
+"outputs": [
+],
+"baseCommand": "cat"
+}
diff --git a/tests/wf/generator/pytoolgen.cwl b/tests/wf/generator/pytoolgen.cwl
index 3d8ce7879..a7dcc9e1b 100644
--- a/tests/wf/generator/pytoolgen.cwl
+++ b/tests/wf/generator/pytoolgen.cwl
@@ -28,5 +28,5 @@ run:
v.push({entryname: "inp.py", entry: inputs.script});
return v;
}
- arguments: [python, inp.py]
+ arguments: [python3, inp.py]
stdout: main.cwl
diff --git a/tests/wf/input_named_id.cwl b/tests/wf/input_named_id.cwl
new file mode 100644
index 000000000..e559f967b
--- /dev/null
+++ b/tests/wf/input_named_id.cwl
@@ -0,0 +1,13 @@
+label: FeatureFinderIdentification
+doc: ""
+inputs:
+ id:
+ doc: featureXML or consensusXML file
+ type: File
+outputs:
+ []
+cwlVersion: v1.2
+class: CommandLineTool
+baseCommand:
+ - FeatureFinderIdentification
+
diff --git a/tests/wf/input_named_id.yaml b/tests/wf/input_named_id.yaml
new file mode 100644
index 000000000..39c36dea8
--- /dev/null
+++ b/tests/wf/input_named_id.yaml
@@ -0,0 +1,3 @@
+id:
+ class: File
+ path: ../2.fastq
diff --git a/tests/wf/js_output.cwl b/tests/wf/js_output.cwl
index 4fa154c08..87b456727 100755
--- a/tests/wf/js_output.cwl
+++ b/tests/wf/js_output.cwl
@@ -6,5 +6,5 @@ requirements:
inputs: []
outputs: []
arguments:
- - valueFrom: ${console.log("Log message");console.error("Error message");return ["python", "-c", "True"]}
- shellQuote: false
\ No newline at end of file
+ - valueFrom: ${console.log("Log message");console.error("Error message");return ["python3", "-c", "True"]}
+ shellQuote: false
diff --git a/tests/wf/mpi_expr.cwl b/tests/wf/mpi_expr.cwl
index 39819c557..2334b865d 100644
--- a/tests/wf/mpi_expr.cwl
+++ b/tests/wf/mpi_expr.cwl
@@ -13,7 +13,7 @@ doc: |
This version takes the number of processes to use as an input and
then passes this to the MPIRequirement using an expression.
-baseCommand: python
+baseCommand: python3
requirements:
cwltool:MPIRequirement:
processes: $(inputs.processes)
diff --git a/tests/wf/mpi_simple.cwl b/tests/wf/mpi_simple.cwl
index c3d8ef56b..6fe836748 100644
--- a/tests/wf/mpi_simple.cwl
+++ b/tests/wf/mpi_simple.cwl
@@ -10,7 +10,7 @@ doc: |
processes. Requires Python (but you have cwltool running, right?)
and an MPI implementation.
-baseCommand: python
+baseCommand: python3
requirements:
cwltool:MPIRequirement:
processes: 2
diff --git a/tests/wf/schemadef-bug-1473.cwl b/tests/wf/schemadef-bug-1473.cwl
index b586be1dd..ad87ae08e 100644
--- a/tests/wf/schemadef-bug-1473.cwl
+++ b/tests/wf/schemadef-bug-1473.cwl
@@ -449,7 +449,7 @@
"name": "#settings-by-samples__1.0.0.yaml/settings-by-samples/samples"
},
{
- "label": "settings by override cylces",
+ "label": "settings by override cycles",
"doc": "Additional bcl convert settings\n",
"type": [
"null",
diff --git a/tests/wf/shm_size.cwl b/tests/wf/shm_size.cwl
new file mode 100644
index 000000000..5b07ec5af
--- /dev/null
+++ b/tests/wf/shm_size.cwl
@@ -0,0 +1,17 @@
+#!/usr/bin/env cwl-runner
+class: CommandLineTool
+cwlVersion: v1.2
+requirements:
+ cwltool:ShmSize:
+ shmSize: 128m
+inputs: []
+
+outputs:
+ output:
+ type: stdout
+
+baseCommand: echo
+
+stdout: shm-size.txt
+
+arguments: [ $(runtime) ]
diff --git a/tests/wf/timelimit.cwl b/tests/wf/timelimit.cwl
index e15ebaddf..7af0d1dff 100644
--- a/tests/wf/timelimit.cwl
+++ b/tests/wf/timelimit.cwl
@@ -11,5 +11,5 @@ inputs:
outputs: []
requirements:
cwltool:TimeLimit:
- timelimit: 15
+ timelimit: 20
baseCommand: sleep
diff --git a/tox.ini b/tox.ini
index eaf6773af..71562af10 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,80 +1,80 @@
[tox]
envlist =
- py3{6,7,8,9,10,11}-lint
- py3{6,7,8,9,10,11}-unit
- py3{6,7,8,9,10,11}-bandit
- py3{7,8,9,10,11}-mypy
- py311-lintreadme
- py311-shellcheck
- py311-pydocstyle
+ py3{8,9,10,11,12}-lint
+ py3{8,9,10,11,12}-unit
+ py3{8,9,10,11,12}-bandit
+ py3{8,9,10,11,12}-mypy
+ py312-lintreadme
+ py312-shellcheck
+ py312-pydocstyle
skip_missing_interpreters = True
[pytest]
-addopts=--ignore cwltool/schemas -n auto
+addopts=--ignore cwltool/schemas -n logical --dist worksteal
testpaths = tests
[gh-actions]
python =
- 3.6: py36
- 3.7: py37
3.8: py38
3.9: py39
3.10: py310
3.11: py311
+ 3.12: py312
[testenv]
skipsdist =
- py3{6,7,8,9,10,11}-!{unit,mypy,lintreadme} = True
+ py3{8,9,10,11,12}-!{unit,mypy,lintreadme} = True
description =
- py3{6,7,8,9,10,11}-unit: Run the unit tests
- py3{6,7,8,9,10,11}-lint: Lint the Python code
- py3{6,7,8,9,10,11}-bandit: Search for common security issues
- py3{7,8,9,10,11}-mypy: Check for type safety
- py311-pydocstyle: docstring style checker
- py311-shellcheck: syntax check for shell scripts
- py311-lintreadme: Lint the README.rst→.md conversion
+ py3{8,9,10,11,12}-unit: Run the unit tests
+ py3{8,9,10,11,12}-lint: Lint the Python code
+ py3{8,9,10,11,12}-bandit: Search for common security issues
+ py3{8,9,10,11,12}-mypy: Check for type safety
+ py312-pydocstyle: docstring style checker
+ py312-shellcheck: syntax check for shell scripts
+ py312-lintreadme: Lint the README.rst→.md conversion
passenv =
CI
GITHUB_*
PROOT_NO_SECCOMP
+ APPTAINER_TMPDIR
+ SINGULARITY_FAKEROOT
extras =
- py3{6,7,8,9,10,11}-unit: deps
+ py3{8,9,10,11,12}-unit: deps
deps =
- py3{6,7,8,9,10,11}-{unit,lint,bandit,mypy}: -rrequirements.txt
- py3{6,7,8,9,10,11}-{unit,mypy}: -rtest-requirements.txt
- py3{6,7,8,9,10,11}-lint: -rlint-requirements.txt
- py3{6,7,8,9,10,11}-bandit: bandit
- py3{6,7,8,9,10,11}-bandit: importlib_metadata != 4.8.0
- py3{7,8,9,10,11}-mypy: -rmypy-requirements.txt
- py311-pydocstyle: pydocstyle
- py311-pydocstyle: diff-cover
- py311-lintreadme: twine
- py311-lintreadme: build
- py311-lintreadme: readme_renderer[rst]
+ py3{8,9,10,11,12}-{unit,lint,bandit,mypy}: -rrequirements.txt
+ py3{8,9,10,11,12}-{unit,mypy}: -rtest-requirements.txt
+ py3{8,9,10,11,12}-lint: -rlint-requirements.txt
+ py3{8,9,10,11,12}-bandit: bandit
+ py3{8,9,10,11,12}-mypy: -rmypy-requirements.txt
+ py312-pydocstyle: pydocstyle
+ py312-pydocstyle: diff-cover
+ py312-lintreadme: twine
+ py312-lintreadme: build
+ py312-lintreadme: readme_renderer[rst]
setenv =
- py3{6,7,8,9,10,11}-unit: LC_ALL = C.UTF-8
+ LC_ALL = C.UTF-8
+ HOME = {envtmpdir}
commands_pre =
- py3{6,7,8,9,10,11}-unit: python -m pip install -U pip setuptools wheel
- py311-lintreadme: python -m build --outdir {distdir}
+ py3{8,9,10,11,12}-unit: python -m pip install -U pip setuptools wheel
+ py312-lintreadme: python -m build --outdir {distdir}
commands =
- py3{6,7,8,9,10,11}-unit: make coverage-report coverage.xml PYTEST_EXTRA={posargs}
- py3{6,7,8,9,10,11}-bandit: bandit -r cwltool
- py3{6,7,8,9,10,11}-lint: make flake8 format-check codespell-check
- py3{7,8,9,10,11}-mypy: make mypy mypyc PYTEST_EXTRA={posargs}
- py37-mypy: make mypy_3.6
- py311-shellcheck: make shellcheck
- py311-pydocstyle: make diff_pydocstyle_report
- py311-lintreadme: twine check {distdir}/*
+ py3{8,9,10,11,12}-unit: make coverage-report coverage.xml PYTEST_EXTRA={posargs}
+ py3{8,9,10,11,12}-bandit: bandit -r cwltool
+ py3{8,9,10,11,12}-lint: make flake8 format-check codespell-check
+ py3{8,9,10,11,12}-mypy: make mypy mypyc PYTEST_EXTRA={posargs}
+ py312-shellcheck: make shellcheck
+ py312-pydocstyle: make diff_pydocstyle_report
+ py312-lintreadme: twine check {distdir}/*
skip_install =
- py3{6,7,8,9,10,11}-{bandit,lint,mypy,shellcheck,pydocstyle,lintreadme}: true
+ py3{8,9,10,11,12}-{bandit,lint,mypy,shellcheck,pydocstyle,lintreadme}: true
allowlist_externals = make