Import Upstream version 1.0.3
This commit is contained in:
parent
ec2c33e439
commit
04eff854ff
@@ -0,0 +1,17 @@
name: CD

on:
  workflow_dispatch:
  pull_request:
    branches:
      - main
  release:
    types:
      - published

jobs:
  dist:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: hynek/build-and-inspect-python-package@v1
@@ -0,0 +1,59 @@
name: change detection

on:
  workflow_call:
    outputs:
      run-docs:
        description: Whether or not build the docs
        value: ${{ jobs.change-detection.outputs.run-docs || false }}
      run-tests:
        description: Whether or not run the tests
        value: ${{ jobs.change-detection.outputs.run-tests || false }}

jobs:
  change-detection:
    name: Identify source changes
    runs-on: ubuntu-latest
    timeout-minutes: 1
    outputs:
      run-docs: ${{ steps.docs-changes.outputs.run-docs || false }}
      run-tests: ${{ steps.tests-changes.outputs.run-tests || false }}
    steps:
      - uses: actions/checkout@v4
      - name: Get a list of the changed runtime-related files
        if: github.event_name == 'pull_request'
        id: changed-testable-files
        uses: Ana06/get-changed-files@v2.2.0
        with:
          filter: |
            src/**
            tests/**
            tox.ini
            pyproject.toml
            .github/workflows/test.yml
            .github/workflows/reusable-type.yml
            .github/workflows/reusable-pytest.yml
      - name: Set a flag for running the tests
        if: >-
          github.event_name != 'pull_request'
          || steps.changed-testable-files.outputs.added_modified_renamed != ''
        id: tests-changes
        run: >-
          echo "run-tests=true" >> "${GITHUB_OUTPUT}"
      - name: Get a list of the changed documentation-related files
        if: github.event_name == 'pull_request'
        id: changed-docs-files
        uses: Ana06/get-changed-files@v2.2.0
        with:
          filter: |
            docs/**
            CHANGELOG.rst
            README.md
            .github/workflows/test.yml
            .github/workflows/reusable-check.yml
      - name: Set a flag for building the docs
        if: >-
          github.event_name != 'pull_request'
          || steps.changed-docs-files.outputs.added_modified_renamed != ''
        id: docs-changes
        run: >-
          echo "run-docs=true" >> "${GITHUB_OUTPUT}"
@@ -1,13 +1,6 @@
name: check

on:
-  push:
-    branches:
-      - main
-  pull_request:
-    branches:
-      - main
-  schedule:
-    - cron: "0 8 * * *"
+  workflow_call:

jobs:
  docs:
@@ -16,12 +9,12 @@ jobs:
      PY_COLORS: 1
      TOX_PARALLEL_NO_SPINNER: 1
    steps:
-      - uses: actions/checkout@v3
+      - uses: actions/checkout@v4

-      - name: Setup Python 3.9
+      - name: Setup Python 3.10
        uses: actions/setup-python@v4
        with:
-          python-version: 3.9
+          python-version: "3.10"

      - name: Install tox
        run: python -m pip install tox
@@ -0,0 +1,102 @@
name: pytest

on:
  workflow_call:

jobs:
  pytest:
    runs-on: ${{ matrix.os }}-latest
    env:
      PYTEST_ADDOPTS: "--run-integration --showlocals -vv --durations=10 --reruns 5 --only-rerun subprocess.CalledProcessError"
    strategy:
      fail-fast: false
      matrix:
        os:
          - ubuntu
          - macos
          - windows
        py:
          - "pypy-3.7"
          - "pypy-3.8"
          - "pypy-3.9"
          - "3.12"
          - "3.11"
          - "3.10"
          - "3.9"
          - "3.8"
          - "3.7"
        tox-target:
          - "tox"
          - "min"

    continue-on-error: >- # jobs not required in branch protection
      ${{
        (
          startsWith(matrix.py, 'pypy-')
          && (!endsWith(matrix.py, '-3.7') || matrix.os == 'windows')
        )
        && true
        || false
      }}

    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0

      - name: Setup python for test ${{ matrix.py }}
        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.py }}
          allow-prereleases: true

      - name: Pick environment to run
        run: |
          import platform
          import os
          import sys

          if platform.python_implementation() == "PyPy":
              base = f"pypy{sys.version_info.major}{sys.version_info.minor}"
          else:
              base = f"py{sys.version_info.major}{sys.version_info.minor}"
          env = f"BASE={base}\n"
          print(f"Picked:\n{env}for {sys.version}")
          with open(os.environ["GITHUB_ENV"], "a", encoding="utf-8") as file:
              file.write(env)
        shell: python

      - name: Setup python for tox
        uses: actions/setup-python@v4
        with:
          python-version: 3.9

      - name: Install tox
        run: python -m pip install tox

      - name: Run test suite via tox
        if: matrix.tox-target == 'tox'
        run: |
          tox -vv --notest -e ${{env.BASE}}
          tox -e ${{env.BASE}} --skip-pkg-install

      - name: Run minimum version test
        if: matrix.tox-target == 'min'
        run: tox -e ${{env.BASE}}-${{ matrix.tox-target }}

      - name: Run path test
        if: matrix.tox-target == 'tox' && matrix.py == '3.10'
        run: tox -e path

      - name: Combine coverage files
        if: always()
        run: tox -e coverage

      - uses: codecov/codecov-action@v3
        if: always()
        env:
          PYTHON: ${{ matrix.python }}
        with:
          file: ./.tox/coverage.xml
          flags: tests
          env_vars: PYTHON
          name: ${{ matrix.py }} - ${{ matrix.os }}
@@ -0,0 +1,26 @@
name: type

on:
  workflow_call:

jobs:
  type:
    runs-on: ubuntu-latest
    env:
      PY_COLORS: 1
      TOX_PARALLEL_NO_SPINNER: 1
    steps:
      - uses: actions/checkout@v4

      - name: Setup Python 3.9
        uses: actions/setup-python@v4
        with:
          python-version: 3.9

      - name: Install tox
        run: python -m pip install tox

      - name: Setup run environment
        run: tox -vv --notest -e type

      - name: Run check for type
        run: tox -e type --skip-pkg-install
@@ -3,15 +3,9 @@ on:
  push:
    branches:
      - main
-    paths-ignore:
-      - "docs/**"
-      - "*.md"
  pull_request:
    branches:
      - main
-    paths-ignore:
-      - "docs/**"
-      - "*.md"
  schedule:
    - cron: "0 8 * * *"
  workflow_dispatch:
@@ -21,111 +15,54 @@ concurrency:
  cancel-in-progress: true

jobs:
  change-detection:
    uses: ./.github/workflows/reusable-change-detection.yml

  check-docs:
    needs: change-detection
    if: fromJSON(needs.change-detection.outputs.run-docs)
    uses: ./.github/workflows/reusable-docs.yml

  pytest:
    needs: change-detection
    if: fromJSON(needs.change-detection.outputs.run-tests)
    uses: ./.github/workflows/reusable-pytest.yml

  type:
    needs: change-detection
    if: fromJSON(needs.change-detection.outputs.run-tests)
    uses: ./.github/workflows/reusable-type.yml

  # https://github.com/marketplace/actions/alls-green#why
  required-checks-pass: # This job does nothing and is only used for the branch protection
    if: always()

    needs:
      - change-detection # transitive
      - check-docs
      - pytest
      - type

    runs-on: ubuntu-latest
    steps:
      - name: Decide whether the needed jobs succeeded or failed
        uses: re-actors/alls-green@release/v1
        with:
          allowed-skips: >-
            ${{
              fromJSON(needs.change-detection.outputs.run-docs)
              && ''
              || '
              check-docs,
              '
            }}
            ${{
              fromJSON(needs.change-detection.outputs.run-tests)
              && ''
              || '
              pytest,
              type,
              '
            }}
          jobs: ${{ toJSON(needs) }}
@@ -4,7 +4,7 @@ ci:

repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.3.0
+    rev: v4.4.0
    hooks:
      - id: check-ast
      - id: check-builtin-literals
@@ -17,52 +17,40 @@ repos:
      - id: end-of-file-fixer
      - id: trailing-whitespace
      - id: double-quote-string-fixer
-  - repo: https://github.com/asottile/pyupgrade
-    rev: v3.1.0
+  - repo: https://github.com/abravalheri/validate-pyproject
+    rev: v0.14
    hooks:
-      - id: pyupgrade
-        args: ["--py36-plus"]
-  - repo: https://github.com/psf/black
-    rev: 22.10.0
+      - id: validate-pyproject
+  - repo: https://github.com/psf/black-pre-commit-mirror
+    rev: 23.7.0
    hooks:
      - id: black
  - repo: https://github.com/asottile/blacken-docs
-    rev: v1.12.1
+    rev: 1.16.0
    hooks:
      - id: blacken-docs
-        additional_dependencies: [black==22.6]
+        additional_dependencies: [black==23.7.0]
  - repo: https://github.com/pre-commit/mirrors-prettier
-    rev: "v3.0.0-alpha.2"
+    rev: "v3.0.3"
    hooks:
      - id: prettier
-  - repo: https://github.com/PyCQA/isort
-    rev: 5.10.1
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.0.287
    hooks:
-      - id: isort
-  - repo: https://github.com/asottile/setup-cfg-fmt
-    rev: v2.1.0
-    hooks:
-      - id: setup-cfg-fmt
-        args: [--include-version-classifiers, --max-py-version=3.11]
-  - repo: https://github.com/PyCQA/flake8
-    rev: "5.0.4"
-    hooks:
-      - id: flake8
-        additional_dependencies: ["flake8-bugbear==22.7.1"]
-        language_version: python3.9
+      - id: ruff
+        args: [--fix, --format, grouped, --show-fixes]
  - repo: https://github.com/codespell-project/codespell
-    rev: "v2.2.2"
+    rev: "v2.2.5"
    hooks:
      - id: codespell
        args: ["-L", "sur"]
  - repo: https://github.com/pre-commit/pygrep-hooks
-    rev: "v1.9.0"
+    rev: "v1.10.0"
    hooks:
-      - id: python-check-blanket-noqa
-      - id: python-check-blanket-type-ignore
-      - id: python-no-log-warn
-      - id: python-no-eval
-      - id: python-use-type-annotations
      - id: rst-backticks
      - id: rst-directive-colons
      - id: rst-inline-touching-normal
+  - repo: https://github.com/tox-dev/tox-ini-fmt
+    rev: "1.3.1"
+    hooks:
+      - id: tox-ini-fmt
CHANGELOG.rst
@@ -2,257 +2,224 @@
Changelog
+++++++++


1.0.3 (2023-09-06)
==================

- Avoid CPython 3.8.17, 3.9.17, 3.10.12, and 3.11.4 tarfile symlink bug
  triggered by adding ``data_filter`` in 1.0.0.
  (PR :pr:`675`, fixes issue :issue:`674`)


1.0.0 (2023-09-01)
==================

- Removed the ``toml`` library fallback; ``toml`` can no longer be used
  as a substitute for ``tomli``
  (PR :pr:`567`)
- Added ``runner`` parameter to ``util.project_wheel_metadata``
  (PR :pr:`566`, fixes issue :issue:`553`)
- Modified ``ProjectBuilder`` constructor signature, added alternative
  ``ProjectBuilder.from_env`` constructor, redefined ``env.IsolatedEnv``
  interface, and exposed ``env.DefaultIsolatedEnv``, replacing
  ``env.IsolatedEnvBuilder``. The aim has been to shift responsibility for
  modifying the environment from the project builder to the ``IsolatedEnv``
  entirely and to ensure that the builder will be initialised from an
  ``IsolatedEnv`` in a consistent manner. Mutating the project builder is no
  longer supported.
  (PR :pr:`537`)
- ``virtualenv`` is no longer imported when using ``-n``, for faster builds
  (PR :pr:`636`, fixes issue :issue:`510`)
- The SDist now contains the repository contents, including tests. Flit-core
  3.8+ required.
  (PR :pr:`657`, :pr:`661`, fixes issue :issue:`656`)
- The minimum version of ``importlib-metadata`` has been increased to 4.6 and
  Python 3.10 due to a bug in the standard library version with URL
  requirements in extras. This is still not required for 3.8 when bootstrapping
  (as long as you don't have URL requirements in extras).
  (PR :pr:`631`, fixes issue :issue:`630`)
- Docs now built with Sphinx 7
  (PR :pr:`660`)
- Tests now contain a ``network`` marker
  (PR :pr:`649`, fixes issue :issue:`648`)
- Config-settings are now passed to ``get_requires*`` hooks, fixing a long
  standing bug. If this affects your setuptools build, you can use
  ``-C--build-option=<cmd> -C--build-option=<option>`` to work around an issue
  with Setuptools not allowing unrecognised build options when running this
  hook.
  (PR :pr:`627`, fixes issue :issue:`264`)
- Test on Python 3.12 betas/RCs
  (PR :pr:`624`)
- Filter out malicious files when extracting tar archives when Python supports it
  (PR :pr:`609`)
- Specify encoding, fixing issues when ``PYTHONWARNDEFAULTENCODING`` is set.
  (PR :pr:`587`, fixes issue :issue:`577`)
- Ruff is now used for linting.


0.10.0 (2023-01-11)
===================

- Replace ``pep517`` dependency with ``pyproject_hooks``,
  into which ``pep517`` has been renamed
  (PR :pr:`539`, Fixes :issue:`529`)
- Change build backend from ``setuptools`` to ``flit``
  (PR :pr:`470`, Fixes :issue:`394`)
- Dropped support for Python 3.6 (PR :pr:`532`)


0.9.0 (2022-10-27)
==================

- Hide a Python 3.11.0 unavoidable warning with venv (PR :pr:`527`)
- Fix infinite recursion error in ``check_dependency`` with circular
  dependencies (PR :pr:`512`, Fixes :issue:`511`)
- Only import colorama on Windows (PR :pr:`494`, Fixes :issue:`493`)
- Flush output more often to reduce interleaved output (PR :pr:`494`)
- Small API cleanup, like better ``__all__`` and srcdir being read only. (PR :pr:`477`)
- Only use ``importlib_metadata`` when needed (PR :pr:`401`)
- Clarify in printout when build dependencies are being installed (PR :pr:`514`)


0.8.0 (2022-05-22)
==================

- Accept ``os.PathLike[str]`` in addition to ``str`` for paths in public
  API (PR :pr:`392`, Fixes :issue:`372`)
- Add schema validation for ``build-system`` table to check conformity
  with PEP 517 and PEP 518 (PR :pr:`365`, Fixes :issue:`364`)
- Better support for Python 3.11 (sysconfig schemes PR :pr:`434`, PR :pr:`463`, tomllib PR :pr:`443`, warnings PR :pr:`420`)
- Improved error printouts (PR :pr:`442`)
- Avoid importing packaging unless needed (PR :pr:`395`, Fixes :issue:`393`)

Breaking Changes
----------------

- Failure to create a virtual environment in the ``build.env`` module now raises
  ``build.FailedProcessError`` (PR :pr:`442`)


0.7.0 (2021-09-16)
==================

- Add ``build.util`` module with a high-level utility API (PR :pr:`340`)


0.6.0.post1 (2021-08-05)
========================

- Fix compatibility with Python 3.6 and 3.7 (PR :pr:`339`, Fixes :issue:`338`)


0.6.0 (2021-08-02)
==================

- Improved output (PR :pr:`333`, Fixes :issue:`142`)
- The CLI now honors ``NO_COLOR`` (PR :pr:`333`)
- The CLI can now be forced to colorize the output by setting the ``FORCE_COLOR`` environment variable (PR :pr:`335`)
- Added logging to ``build`` and ``build.env`` (PR :pr:`333`)
- Switch to a TOML v1 compliant parser (PR :pr:`336`, Fixes :issue:`308`)

Breaking Changes
----------------

- Dropped support for Python 2 and 3.5.


0.5.1 (2021-06-22)
==================

- Fix invoking the backend on an inexistent output directory with multiple levels (PR :pr:`318`, Fixes :issue:`316`)
- When building wheels via sdists, use an isolated temporary directory (PR :pr:`321`, Fixes :issue:`320`)


0.5.0 (2021-06-19)
==================

- Add ``ProjectBuilder.metadata_path`` helper (PR :pr:`303`, Fixes :issue:`301`)
- Added a ``build.__main__.build_package_via_sdist`` method (PR :pr:`304`)
- Use appropriate installation scheme for Apple Python venvs (PR :pr:`314`, Fixes :issue:`310`)

Breaking Changes
----------------

- Binary distributions are now built via the sdist by default in the CLI (PR :pr:`304`, Fixes :issue:`257`)
- ``python -m build`` will now build a sdist, extract it, and build a wheel from the source
- As a side-effect of PR :pr:`304`, ``build.__main__.build_package`` no longer does CLI error handling (print nice message and exit the program)
- Importing ``build.__main__`` no longer has any side-effects, it no longer overrides ``warnings.showwarning`` or runs ``colorama.init`` on import (PR :pr:`312`)


0.4.0 (2021-05-23)
==================

- Validate that the supplied source directory is valid (PR :pr:`260`, Fixes :issue:`259`)
- Set and test minimum versions of build's runtime dependencies (PR :pr:`267`, Fixes :issue:`263`)
- Use symlinks on creating venv's when available (PR :pr:`274`, Fixes :issue:`271`)
- Error sooner if pip upgrade is required and fails (PR :pr:`288`, Fixes :issue:`256`)
- Add a ``runner`` argument to ``ProjectBuilder`` (PR :pr:`290`, Fixes :issue:`289`)
- Hide irrelevant ``pep517`` error traceback and improve error messages (PR :pr:`296`)
- Try to use ``colorama`` to fix colors on Windows (PR :pr:`300`)

Breaking Changes
----------------

- As a side-effect of PR :pr:`260`, projects not containing either a ``pyproject.toml`` or ``setup.py`` will be reported as invalid. This affects projects specifying only a ``setup.cfg``, such projects are recommended to add a ``pyproject.toml``. The new behavior is on par with what pip currently does, so if you are affected by this, your project should not be pip installable.
- The ``--skip-dependencies`` option has been renamed to ``--skip-dependency-check`` (PR :pr:`297`)
- The ``skip_dependencies`` argument of ``build.__main__.build_package`` has been renamed to ``skip_dependency_check`` (PR :pr:`297`)
- ``build.ConfigSettings`` has been renamed to ``build.ConfigSettingsType`` (PR :pr:`298`)
- ``build.ProjectBuilder.build_dependencies`` to ``build.ProjectBuilder.build_system_requires`` (PR :pr:`284`, Fixes :issue:`182`)
- ``build.ProjectBuilder.get_dependencies`` to ``build.ProjectBuilder.get_requires_for_build`` (PR :pr:`284`, Fixes :issue:`182`)


0.3.1 (2021-03-09)
==================

- Support direct usage from pipx run in 0.16.1.0+ (PR :pr:`247`)
- Use UTF-8 encoding when reading pyproject.toml (PR :pr:`251`, Fixes :issue:`250`)


0.3.0 (2021-02-19)
==================

- Upgrade pip based on venv pip version, avoids error on Debian Python 3.6.5-3.8 or issues installing wheels on Big Sur (PR :pr:`229`, PR :pr:`230`, Fixes :issue:`228`)
- Build dependencies in isolation, instead of in the build environment (PR :pr:`232`, Fixes :issue:`231`)
- Fallback on venv if virtualenv is too old (PR :pr:`241`)
- Add metadata preparation hook (PR :pr:`217`, Fixes :issue:`130`)


0.2.1 (2021-02-09)
==================

- Fix error from unrecognised pip flag on Python 3.6.0 to 3.6.5 (PR :pr:`227`, Fixes :issue:`226`)


0.2.0 (2021-02-07)
==================

- Check dependencies recursively (PR :pr:`183`, Fixes :issue:`25`)
- Build wheel and sdist distributions in separate environments, as they may have different dependencies (PR :pr:`195`, Fixes :issue:`194`)
- Add support for pre-releases in ``check_dependency`` (PR :pr:`204`, Fixes :issue:`191`)
- Fixes console scripts not being available during build (PR :pr:`221`, Fixes :issue:`214`)
- Do not add the default backend requirements to ``requires`` when no backend is specified (PR :pr:`177`, Fixes :issue:`107`)
- Return the sdist name in ``ProjectBuild.build`` (PR :pr:`197`)
- Improve documentation (PR :pr:`178`, PR :pr:`203`)
- Add changelog (PR :pr:`219`, Fixes :issue:`169`)

Breaking changes
----------------

- Move ``config_settings`` argument to the hook calls (PR :pr:`218`, Fixes :issue:`216`)
@@ -1,15 +1,14 @@
# build

[![pre-commit.ci status](https://results.pre-commit.ci/badge/github/pypa/build/main.svg)](https://results.pre-commit.ci/latest/github/pypa/build/main)
-[![CI check](https://github.com/pypa/build/workflows/check/badge.svg)](https://github.com/pypa/build/actions)
[![CI test](https://github.com/pypa/build/actions/workflows/test.yml/badge.svg)](https://github.com/pypa/build/actions/workflows/test.yml)
[![codecov](https://codecov.io/gh/pypa/build/branch/main/graph/badge.svg)](https://codecov.io/gh/pypa/build)

[![Documentation Status](https://readthedocs.org/projects/pypa-build/badge/?version=latest)](https://pypa-build.readthedocs.io/en/latest/?badge=latest)
[![PyPI version](https://badge.fury.io/py/build.svg)](https://pypi.org/project/build/)
-[![Discord](https://img.shields.io/discord/803025117553754132?label=Discord%20chat%20%23build&style=flat-square)](https://discord.gg/pypa)
+[![Discord](https://img.shields.io/discord/803025117553754132?label=Discord%20chat%20%23build)](https://discord.gg/pypa)

-A simple, correct PEP 517 build frontend.
+A simple, correct Python build frontend.

See the [documentation](https://pypa-build.readthedocs.io/en/latest/) for more information.
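A minimal usage sketch of the frontend the README describes, assuming a ``pyproject.toml``-based project in the current directory; the flags shown are the documented ones:

```sh
# Build an sdist and a wheel into ./dist, using an isolated build environment (the default)
python -m build

# Build only a wheel, reusing the current environment instead of an isolated one
python -m build --wheel --no-isolation
```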
@@ -1,5 +0,0 @@
python-build (0.9.0-ok1) yangtze; urgency=medium

  * Build for openkylin.

 -- sufang <sufang@kylinos.cn>  Mon, 30 Jan 2023 15:19:05 +0800
@@ -1,34 +0,0 @@
Source: python-build
Section: python
Priority: optional
Maintainer: OpenKylin Developers <packaging@lists.openkylin.top>
Build-Depends: debhelper-compat (= 13),
               dh-python,
               python3-setuptools,
               python3-all,
               python3-packaging,
               python3-pep517,
               python3-toml,
Standards-Version: 4.6.2
Homepage: https://github.com/pypa/build
Vcs-Browser: https://gitee.com/openkylin/python-build
Vcs-Git: https://gitee.com/openkylin/python-build.git
Testsuite: autopkgtest-pkg-python
Rules-Requires-Root: no

Package: python3-build
Architecture: all
Depends: ${python3:Depends},
         ${misc:Depends},
         python3-packaging,
         python3-pep517,
         python3-toml,
         python3-wheel,
Suggests: python3-pip,
          python3-venv,
Description: Simple, correct PEP517 package builder (Python 3)
 python-build will invoke the PEP 517 hooks to build a distribution
 package. It is a simple build tool and does not perform any
 dependency management.
 .
 This package installs the library for Python 3.
@@ -1,114 +0,0 @@
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
Upstream-Name: python-build
Source: <url://example.com>
#
# Please double check copyright with the licensecheck(1) command.

Files: .dockerignore
       .github/CODEOWNERS
       .github/dependabot.yml
       .github/workflows/check.yml
       .github/workflows/test.yml
       .gitignore
       .pre-commit-config.yaml
       .readthedocs.yml
       CHANGELOG.rst
       README.md
       codecov.yml
       docs/api.rst
       docs/differences.rst
       docs/index.rst
       docs/installation.rst
       docs/mission.rst
       docs/test_suite.rst
       pyproject.toml
       setup.cfg
       setup.py
       src/build/__init__.py
       src/build/__main__.py
       src/build/env.py
       src/build/py.typed
       src/build/util.py
       tests/conftest.py
       tests/constraints.txt
       tests/packages/inline/pyproject.toml
       tests/packages/legacy/legacy/__init__.py
       tests/packages/legacy/setup.py
       tests/packages/test-bad-backend/pyproject.toml
       tests/packages/test-bad-syntax/pyproject.toml
       tests/packages/test-bad-wheel/backend_bad_wheel.py
       tests/packages/test-bad-wheel/pyproject.toml
       tests/packages/test-bad-wheel/setup.cfg
       tests/packages/test-cant-build-via-sdist/backend_bad_sdist.py
       tests/packages/test-cant-build-via-sdist/pyproject.toml
       tests/packages/test-cant-build-via-sdist/some-file-that-is-needed-for-build.txt
       tests/packages/test-flit/pyproject.toml
       tests/packages/test-flit/test_flit/__init__.py
       tests/packages/test-invalid-requirements/pyproject.toml
       tests/packages/test-invalid-requirements/setup.cfg
       tests/packages/test-metadata/backend.py
       tests/packages/test-metadata/pyproject.toml
       tests/packages/test-no-backend/pyproject.toml
       tests/packages/test-no-permission/pyproject.toml
       tests/packages/test-no-prepare/backend_no_prepare.py
       tests/packages/test-no-prepare/pyproject.toml
       tests/packages/test-no-prepare/setup.cfg
       tests/packages/test-no-project/empty.txt
       tests/packages/test-no-requires/pyproject.toml
       tests/packages/test-optional-hooks/hookless_backend.py
       tests/packages/test-optional-hooks/pyproject.toml
       tests/packages/test-setuptools/pyproject.toml
       tests/packages/test-setuptools/setup.cfg
       tests/packages/test-typo/pyproject.toml
       tests/test_env.py
       tests/test_integration.py
       tests/test_main.py
       tests/test_module.py
       tests/test_projectbuilder.py
       tests/test_self_packaging.py
       tests/test_util.py
       tox.ini
Copyright: __NO_COPYRIGHT_NOR_LICENSE__
License: __NO_COPYRIGHT_NOR_LICENSE__

Files: tests/packages/inline/build.py
Copyright: __NO_COPYRIGHT__ in: tests/packages/inline/build.py
License: __UNKNOWN__
 Desc
 .
 Wheel-Version: 1.0
 Generator: {name}-{version}
 Root-Is-Purelib: true
 Tag: py3-none-any

Files: docs/conf.py
Copyright: __NO_COPYRIGHT__ in: docs/conf.py
License: __UNKNOWN__
 # The short X.Y version

#----------------------------------------------------------------------------
# Files marked as NO_LICENSE_TEXT_FOUND may be covered by the following
# license/copyright files.

#----------------------------------------------------------------------------
# License file: LICENSE
Copyright © 2019 Filipe Laíns <filipe.lains@gmail.com>
 .
 Permission is hereby granted, free of charge, to any person obtaining a
 copy of this software and associated documentation files (the "Software"),
 to deal in the Software without restriction, including without limitation
 the rights to use, copy, modify, merge, publish, distribute, sublicense,
 and/or sell copies of the Software, and to permit persons to whom the
 Software is furnished to do so, subject to the following conditions:
 .
 The above copyright notice and this permission notice (including the next
 paragraph) shall be included in all copies or substantial portions of the
 Software.
 .
 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 DEALINGS IN THE SOFTWARE.
@@ -1 +0,0 @@
# You must remove unused comment lines for the released package.
@@ -1,20 +0,0 @@
#!/usr/bin/make -f

export PYBUILD_NAME=build
export SETUPTOOLS_USE_DISTUTILS=stdlib

%:
	dh $@ --with python3 --buildsystem=pybuild

# Unfortunately, python-build's testsuite relies heavily on "pip
# install" and other network-related operations, which are not
# allowed during build time.
#
# Although not all tests are affected by this, it is becoming more and
# more difficult to maintain a list of tests that should be disabled
# because of this issue. For this reason, and in order to keep the
# maintenance burden low, we decided to disable running these tests
# during build time.
#
# TODO: Create a dep8 test that runs the entire upstream testsuite.
override_dh_auto_test:
@@ -1 +0,0 @@
3.0 (native)
@@ -1,5 +0,0 @@
---
Bug-Database: https://github.com/pypa/build/issues
Bug-Submit: https://github.com/pypa/build/issues/new
Repository: https://github.com/pypa/build.git
Repository-Browse: https://github.com/pypa/build
@@ -1,4 +0,0 @@
# Compulsory line, this is a version 4 file
version=4

https://github.com/pypa/build/tags .*/archive/refs/tags/v?((?:\d+\.?)*)\.tar\.gz
@@ -1 +0,0 @@
../CHANGELOG.rst
@@ -0,0 +1 @@
.. include:: ../CHANGELOG.rst
@@ -35,6 +35,7 @@ extensions = [
    'sphinx.ext.intersphinx',
    'sphinx_autodoc_typehints',
    'sphinx_argparse_cli',
+    'sphinx_issues',
]

intersphinx_mapping = {
@@ -65,3 +66,11 @@ html_title = f'build {version}'
# html_static_path = ['_static']

autoclass_content = 'both'
+
+nitpick_ignore = [
+    # https://github.com/python/importlib_metadata/issues/316
+    ('py:class', 'importlib.metadata._meta.PackageMetadata'),
+]
+
+
+issues_github_path = 'pypa/build'
@@ -4,9 +4,10 @@
build
*****

-A simple, correct :pep:`517` build frontend.
+A simple, correct Python packaging build frontend.

-build will invoke the :pep:`517` hooks to build a distribution package.
+build manages ``pyproject.toml``-based builds, invoking
+build-backend hooks as appropriate to build a distribution package.
It is a simple build tool and does not perform any dependency management.

.. sphinx_argparse_cli::
@@ -43,6 +44,7 @@ environment, but this behavior can be disabled with ``--no-isolation``.
   :hidden:

   test_suite
+   release

.. toctree::
   :caption: Project Links
@@ -2,20 +2,18 @@
Installation
============

-You can download a tarball_ from Github, checkout the latest `git tag`_ or fetch
-the artifacts from `project page`_ on PyPI.
-
-The recommended way is to checkout the git tags, as they are PGP signed with one
-of the following keys:
-
-- |3DCE51D60930EBA47858BA4146F633CBB0EB4BF2|_ *(Filipe Laíns)*
-
-``build`` may also be installed via `pip`_ or an equivalent:
+``build`` can be installed via `pip`_ or an equivalent:

.. code-block:: sh

   $ pip install build

+You can also check out the latest `git tag`_, download a tarball_ from GitHub, or
+manually fetch the artifacts from the `project page`_ on PyPI. The git tags are
+recommended for redistribution and are PGP-signed with one of the following keys:
+
+- |3DCE51D60930EBA47858BA4146F633CBB0EB4BF2|_ *(Filipe Laíns)*
+
.. tip::
   If you prefer, or are already using virtualenv_ in your workflow, you can
   install ``build`` with the optional ``virtualenv`` dependency:
@@ -33,13 +31,9 @@ of the following keys:
Bootstrapping
=============

-This package can build itself with only the ``toml`` and ``pep517``
-dependencies. The ``--skip-dependency-check`` flag should be used in this
-case.
-
-On Python 3.10 and older, we have a dependency on tomli_, but toml_ can be
-used instead, which may make bootstrapping easier.
+This package can build itself only with the ``tomli`` (can be omitted in Python 3.11+)
+and ``pyproject-hooks`` dependencies.
+The ``--skip-dependency-check`` flag should be used in this case.

Compatibility
=============
@@ -47,13 +41,11 @@ Compatibility
``build`` is verified to be compatible with the following Python
versions:

-- 2.7
-- 3.5
-- 3.6
- 3.7
- 3.8
- 3.9
-- PyPy(2)
+- 3.10
+- 3.11
- PyPy3
@@ -70,7 +62,6 @@ versions:
.. _project page: https://pypi.org/project/build/

.. _tomli: https://github.com/hukkin/tomli
-.. _toml: https://github.com/uiri/toml

.. |3DCE51D60930EBA47858BA4146F633CBB0EB4BF2| replace:: ``3DCE51D60930EBA47858BA4146F633CBB0EB4BF2``
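A rough sketch of the bootstrapping flow the updated section describes, assuming the runtime dependencies it names (``pyproject-hooks``, plus ``tomli`` on Python older than 3.11) and the build backend are already importable:

.. code-block:: sh

   # Build without creating an isolated environment and without verifying
   # build's own runtime dependencies.
   python -m build --no-isolation --skip-dependency-check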
@@ -0,0 +1,27 @@
***************
Release Process
***************

As this project is critical to the Python ecosystem's supply chain security, all
releases are PGP signed with one of the keys listed in the :doc:`installation page <installation>`.
Before releasing please make sure your PGP key is listed there, and preferably
signed by one of the other key holders. If your key is not signed by one of the
other key holders, please make sure the PR that added your key to the
:doc:`installation page <installation>` was approved by at least one other maintainer.

After that is done, you may release the project by following these steps:

#. Bump the versions in ``pyproject.toml`` and ``src/build/__init__.py``
#. Update ``CHANGELOG.rst`` with the new version and current date
#. Make a release commit with the changes made above

   - The commit message should follow the ``release X.Y.Z`` format

#. Make a signed tag (``git tag -s X.Y.Z``)

   - The tag title should follow the ``build X.Y.Z`` format
   - The tag body should be a plaintext version of the changelog for the current
     release

#. Push the commit and tag to the repository (``git push`` and ``git push --tags``)
#. Build the Python artifacts (``python -m build``)
#. Sign and push the artifacts to PyPI (``twine upload -s dist/*``)

If you have any questions, please look at previous releases and/or ping the
other maintainers.
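Condensed into shell form, the tagging and publishing steps above amount to the following sketch, with ``X.Y.Z`` standing in for the actual release version:

.. code-block:: sh

   git tag -s X.Y.Z              # signed tag titled "build X.Y.Z"
   git push && git push --tags   # publish the release commit and tag
   python -m build               # build the sdist and wheel into ./dist
   twine upload -s dist/*        # sign and upload the artifacts to PyPI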
pyproject.toml
@@ -1,6 +1,86 @@
[build-system]
-requires = ["setuptools >=42.0"]
-build-backend = "setuptools.build_meta"
+requires = ["flit-core >= 3.8"]
+build-backend = "flit_core.buildapi"
+
+[project]
+name = "build"
+version = "1.0.3"
+description = "A simple, correct Python build frontend"
+readme = "README.md"
+requires-python = ">= 3.7"
+license.file = "LICENSE"
+authors = [
+  { name = "Filipe Laíns", email = "lains@riseup.net" },
+  { name = "Bernát Gábor", email = "gaborjbernat@gmail.com" },
+  { name = "layday", email = "layday@protonmail.com" },
+  { name = "Henry Schreiner", email = "henryschreineriii@gmail.com" },
+]
+classifiers = [
+  "License :: OSI Approved :: MIT License",
+  "Programming Language :: Python :: 3",
+  "Programming Language :: Python :: 3 :: Only",
+  "Programming Language :: Python :: 3.7",
+  "Programming Language :: Python :: 3.8",
+  "Programming Language :: Python :: 3.9",
+  "Programming Language :: Python :: 3.10",
+  "Programming Language :: Python :: 3.11",
+  "Programming Language :: Python :: 3.12",
+  "Programming Language :: Python :: Implementation :: CPython",
+  "Programming Language :: Python :: Implementation :: PyPy",
+]
+urls.homepage = "https://github.com/pypa/build"
+urls.changelog = "https://pypa-build.readthedocs.io/en/stable/changelog.html"
+
+dependencies = [
+  "packaging >= 19.0",
+  "pyproject_hooks",
+  # not actually a runtime dependency, only supplied as there is not "recommended dependency" support
+  'colorama; os_name == "nt"',
+  'importlib-metadata >= 4.6; python_version < "3.10"', # Not required for 3.8+, but fixes a stdlib bug
+  'tomli >= 1.1.0; python_version < "3.11"',
+]
+
+[project.optional-dependencies]
+docs = [
+  "furo >= 2023.08.17",
+  "sphinx ~= 7.0",
+  "sphinx-argparse-cli >= 1.5",
+  "sphinx-autodoc-typehints >= 1.10",
+  "sphinx-issues >= 3.0.0",
+]
+test = [
+  "filelock >= 3",
+  "pytest >= 6.2.4",
+  "pytest-cov >= 2.12",
+  "pytest-mock >= 2",
+  "pytest-rerunfailures >= 9.1",
+  "pytest-xdist >= 1.34",
+  "wheel >= 0.36.0",
+  'setuptools >= 42.0.0; python_version < "3.10"',
+  'setuptools >= 56.0.0; python_version == "3.10"',
+  'setuptools >= 56.0.0; python_version == "3.11"',
+  'setuptools >= 67.8.0; python_version >= "3.12"',
+]
+typing = [
+  "importlib-metadata >= 5.1",
+  "mypy ~= 1.5.0",
+  "tomli",
+  "typing-extensions >= 3.7.4.3",
+]
+virtualenv = [
+  "virtualenv >= 20.0.35",
+]
+
+[project.scripts]
+pyproject-build = "build.__main__:entrypoint"
+
+[project.entry-points."pipx.run"]
+build = "build.__main__:entrypoint"
+
+[tool.flit.sdist]
+include = ["tests/", ".gitignore", "CHANGELOG.rst", "docs/", ".dockerignore", "tox.ini"]
+exclude = ["**/__pycache__", "docs/_build", "**/*.egg-info", "tests/packages/*/build"]
+

[tool.coverage.run]
source = [
@@ -12,6 +92,7 @@ source = [
exclude_lines = [
  '\#\s*pragma: no cover',
  '^\s*raise NotImplementedError\b',
+  "if typing.TYPE_CHECKING:",
]

[tool.coverage.paths]
@@ -35,25 +116,26 @@ norecursedirs = "tests/integration/*"
markers = [
  "isolated",
  "pypy3323bug",
+  "network",
]
filterwarnings = [
|
filterwarnings = [
|
||||||
"error",
|
"error",
|
||||||
"ignore:path is deprecated.:DeprecationWarning",
|
"ignore:path is deprecated.:DeprecationWarning",
|
||||||
"ignore:The --rsyncdir command line argument and rsyncdirs config variable are deprecated.:DeprecationWarning",
|
"ignore:The --rsyncdir command line argument and rsyncdirs config variable are deprecated.:DeprecationWarning",
|
||||||
|
"default:Python 3.14 will, by default, filter extracted tar archives:DeprecationWarning",
|
||||||
]
|
]
|
||||||
|
|
||||||
[tool.mypy]
|
[tool.mypy]
|
||||||
files = "src"
|
files = "src"
|
||||||
python_version = "3.6"
|
python_version = "3.7"
|
||||||
strict = true
|
strict = true
|
||||||
show_error_codes = true
|
show_error_codes = true
|
||||||
enable_error_code = ["ignore-without-code", "truthy-bool", "redundant-expr"]
|
enable_error_code = ["ignore-without-code", "truthy-bool", "redundant-expr"]
|
||||||
|
|
||||||
|
|
||||||
[[tool.mypy.overrides]]
|
[[tool.mypy.overrides]]
|
||||||
module = [
|
module = [
|
||||||
"colorama", # Optional dependency
|
"colorama", # Optional dependency
|
||||||
"pep517.*", # Untyped
|
"pyproject_hooks.*", # Untyped
|
||||||
"virtualenv", # Optional dependency
|
"virtualenv", # Optional dependency
|
||||||
]
|
]
|
||||||
ignore_missing_imports = true
|
ignore_missing_imports = true
|
||||||
|
@ -61,12 +143,36 @@ ignore_missing_imports = true
|
||||||
[tool.black]
|
[tool.black]
|
||||||
line-length = 127
|
line-length = 127
|
||||||
skip-string-normalization = true
|
skip-string-normalization = true
|
||||||
target-version = ["py39", "py38", "py37", "py36"]
|
|
||||||
|
|
||||||
[tool.isort]
|
[tool.ruff]
|
||||||
profile = "black"
|
line-length = 127
|
||||||
lines_between_types = 1
|
exclude = ["tests/packages/test-bad-syntax"]
|
||||||
lines_after_imports = 2
|
select = [
|
||||||
line_length = 127
|
"B", # flake8-bugbear
|
||||||
known_first_party = "build"
|
"C4", # flake8-comprehensions
|
||||||
skip = [] # "build" is included in the default skip list
|
"C9", # mccabe
|
||||||
|
"E", # pycodestyle
|
||||||
|
"F", # pyflakes
|
||||||
|
"I", # isort
|
||||||
|
"PGH", # pygrep-hooks
|
||||||
|
"RUF", # ruff
|
||||||
|
"UP", # pyupgrade
|
||||||
|
"W", # pycodestyle
|
||||||
|
"YTT", # flake8-2020
|
||||||
|
"TRY", # tryceratops
|
||||||
|
"EM", # flake8-errmsg
|
||||||
|
]
|
||||||
|
src = ["src"]
|
||||||
|
|
||||||
|
[tool.ruff.mccabe]
|
||||||
|
max-complexity = 10
|
||||||
|
|
||||||
|
[tool.ruff.isort]
|
||||||
|
lines-between-types = 1
|
||||||
|
lines-after-imports = 2
|
||||||
|
known-first-party = ["build"]
|
||||||
|
|
||||||
|
[tool.check-wheel-contents]
|
||||||
|
ignore = [
|
||||||
|
"W005", # We _are_ build
|
||||||
|
]
|
||||||
|
|
74 setup.cfg
|
@ -1,74 +0,0 @@
|
||||||
[metadata]
|
|
||||||
name = build
|
|
||||||
version = 0.9.0
|
|
||||||
description = A simple, correct PEP 517 build frontend
|
|
||||||
long_description = file: README.md
|
|
||||||
long_description_content_type = text/markdown
|
|
||||||
author = Filipe Laíns
|
|
||||||
author_email = lains@riseup.net
|
|
||||||
license = MIT
|
|
||||||
license_file = LICENSE
|
|
||||||
classifiers =
|
|
||||||
License :: OSI Approved :: MIT License
|
|
||||||
Programming Language :: Python :: 3
|
|
||||||
Programming Language :: Python :: 3 :: Only
|
|
||||||
Programming Language :: Python :: 3.6
|
|
||||||
Programming Language :: Python :: 3.7
|
|
||||||
Programming Language :: Python :: 3.8
|
|
||||||
Programming Language :: Python :: 3.9
|
|
||||||
Programming Language :: Python :: 3.10
|
|
||||||
Programming Language :: Python :: 3.11
|
|
||||||
Programming Language :: Python :: Implementation :: CPython
|
|
||||||
Programming Language :: Python :: Implementation :: PyPy
|
|
||||||
project_urls =
|
|
||||||
homepage = https://github.com/pypa/build
|
|
||||||
changelog = https://pypa-build.readthedocs.io/en/stable/changelog.html
|
|
||||||
|
|
||||||
[options]
|
|
||||||
packages = find:
|
|
||||||
install_requires =
|
|
||||||
packaging>=19.0
|
|
||||||
pep517>=0.9.1
|
|
||||||
colorama;os_name == "nt" # not actually a runtime dependency, only supplied as there is not "recommended dependency" support
|
|
||||||
importlib-metadata>=0.22;python_version < "3.8"
|
|
||||||
tomli>=1.0.0;python_version < "3.11" # toml can be used instead -- in case it makes bootstrapping easier
|
|
||||||
python_requires = >=3.6
|
|
||||||
package_dir =
|
|
||||||
=src
|
|
||||||
|
|
||||||
[options.packages.find]
|
|
||||||
where = src
|
|
||||||
|
|
||||||
[options.entry_points]
|
|
||||||
console_scripts =
|
|
||||||
pyproject-build = build.__main__:entrypoint
|
|
||||||
pipx.run =
|
|
||||||
build = build.__main__:entrypoint
|
|
||||||
|
|
||||||
[options.extras_require]
|
|
||||||
docs =
|
|
||||||
furo>=2021.08.31
|
|
||||||
sphinx~=4.0
|
|
||||||
sphinx-argparse-cli>=1.5
|
|
||||||
sphinx-autodoc-typehints>=1.10
|
|
||||||
test =
|
|
||||||
filelock>=3
|
|
||||||
pytest>=6.2.4
|
|
||||||
pytest-cov>=2.12
|
|
||||||
pytest-mock>=2
|
|
||||||
pytest-rerunfailures>=9.1
|
|
||||||
pytest-xdist>=1.34
|
|
||||||
toml>=0.10.0
|
|
||||||
wheel>=0.36.0
|
|
||||||
setuptools>=42.0.0;python_version < "3.10"
|
|
||||||
setuptools>=56.0.0;python_version >= "3.10"
|
|
||||||
typing =
|
|
||||||
importlib-metadata>=4.6.4
|
|
||||||
mypy==0.950
|
|
||||||
typing-extensions>=3.7.4.3;python_version < "3.8"
|
|
||||||
virtualenv =
|
|
||||||
virtualenv>=20.0.35
|
|
||||||
|
|
||||||
[options.package_data]
|
|
||||||
build =
|
|
||||||
py.typed
|
|
|
@ -4,67 +4,47 @@
|
||||||
build - A simple, correct PEP 517 build frontend
|
build - A simple, correct PEP 517 build frontend
|
||||||
"""
|
"""
|
||||||
|
|
||||||
__version__ = '0.9.0'
|
from __future__ import annotations
|
||||||
|
|
||||||
|
|
||||||
|
__version__ = '1.0.3'
|
||||||
|
|
||||||
import contextlib
|
import contextlib
|
||||||
import difflib
|
import difflib
|
||||||
import logging
|
import logging
|
||||||
import os
|
import os
|
||||||
import re
|
|
||||||
import subprocess
|
import subprocess
|
||||||
import sys
|
import sys
|
||||||
import textwrap
|
|
||||||
import types
|
|
||||||
import warnings
|
import warnings
|
||||||
import zipfile
|
import zipfile
|
||||||
|
|
||||||
from collections import OrderedDict
|
from collections.abc import Iterator
|
||||||
from typing import (
|
from typing import Any, Callable, Mapping, Optional, Sequence, TypeVar, Union
|
||||||
AbstractSet,
|
|
||||||
Any,
|
import pyproject_hooks
|
||||||
Callable,
|
|
||||||
Dict,
|
from . import env
|
||||||
Iterator,
|
from ._exceptions import (
|
||||||
List,
|
BuildBackendException,
|
||||||
Mapping,
|
BuildException,
|
||||||
MutableMapping,
|
BuildSystemTableValidationError,
|
||||||
Optional,
|
FailedProcessError,
|
||||||
Sequence,
|
TypoWarning,
|
||||||
Set,
|
|
||||||
Tuple,
|
|
||||||
Type,
|
|
||||||
Union,
|
|
||||||
)
|
)
|
||||||
|
from ._util import check_dependency, parse_wheel_filename
|
||||||
|
|
||||||
import pep517.wrappers
|
|
||||||
|
|
||||||
|
|
||||||
TOMLDecodeError: Type[Exception]
|
|
||||||
toml_loads: Callable[[str], MutableMapping[str, Any]]
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 11):
|
if sys.version_info >= (3, 11):
|
||||||
from tomllib import TOMLDecodeError
|
import tomllib
|
||||||
from tomllib import loads as toml_loads
|
|
||||||
else:
|
else:
|
||||||
try:
|
import tomli as tomllib
|
||||||
from tomli import TOMLDecodeError
|
|
||||||
from tomli import loads as toml_loads
|
|
||||||
except ModuleNotFoundError: # pragma: no cover
|
|
||||||
from toml import TomlDecodeError as TOMLDecodeError # type: ignore[import,no-redef]
|
|
||||||
from toml import loads as toml_loads # type: ignore[no-redef]
|
|
||||||
|
|
||||||
|
|
||||||
RunnerType = Callable[[Sequence[str], Optional[str], Optional[Mapping[str, str]]], None]
|
RunnerType = Callable[[Sequence[str], Optional[str], Optional[Mapping[str, str]]], None]
|
||||||
ConfigSettingsType = Mapping[str, Union[str, Sequence[str]]]
|
ConfigSettingsType = Mapping[str, Union[str, Sequence[str]]]
|
||||||
PathType = Union[str, 'os.PathLike[str]']
|
PathType = Union[str, 'os.PathLike[str]']
|
||||||
_ExcInfoType = Union[Tuple[Type[BaseException], BaseException, types.TracebackType], Tuple[None, None, None]]
|
|
||||||
|
|
||||||
|
_TProjectBuilder = TypeVar('_TProjectBuilder', bound='ProjectBuilder')
|
||||||
_WHEEL_NAME_REGEX = re.compile(
|
|
||||||
r'(?P<distribution>.+)-(?P<version>.+)'
|
|
||||||
r'(-(?P<build_tag>.+))?-(?P<python_tag>.+)'
|
|
||||||
r'-(?P<abi_tag>.+)-(?P<platform_tag>.+)\.whl'
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
_DEFAULT_BACKEND = {
|
_DEFAULT_BACKEND = {
|
||||||
|
@ -76,148 +56,42 @@ _DEFAULT_BACKEND = {
|
||||||
_logger = logging.getLogger(__name__)
|
_logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class BuildException(Exception):
|
|
||||||
"""
|
|
||||||
Exception raised by :class:`ProjectBuilder`
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
class BuildBackendException(Exception):
|
|
||||||
"""
|
|
||||||
Exception raised when a backend operation fails
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(
|
|
||||||
self, exception: Exception, description: Optional[str] = None, exc_info: _ExcInfoType = (None, None, None)
|
|
||||||
) -> None:
|
|
||||||
super().__init__()
|
|
||||||
self.exception = exception
|
|
||||||
self.exc_info = exc_info
|
|
||||||
self._description = description
|
|
||||||
|
|
||||||
def __str__(self) -> str:
|
|
||||||
if self._description:
|
|
||||||
return self._description
|
|
||||||
return f'Backend operation failed: {self.exception!r}'
|
|
||||||
|
|
||||||
|
|
||||||
class BuildSystemTableValidationError(BuildException):
|
|
||||||
"""
|
|
||||||
Exception raised when the ``[build-system]`` table in pyproject.toml is invalid.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __str__(self) -> str:
|
|
||||||
return f'Failed to validate `build-system` in pyproject.toml: {self.args[0]}'
|
|
||||||
|
|
||||||
|
|
||||||
class FailedProcessError(Exception):
|
|
||||||
"""
|
|
||||||
Exception raised when an setup or prepration operation fails.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, exception: subprocess.CalledProcessError, description: str) -> None:
|
|
||||||
super().__init__()
|
|
||||||
self.exception = exception
|
|
||||||
self._description = description
|
|
||||||
|
|
||||||
def __str__(self) -> str:
|
|
||||||
cmd = ' '.join(self.exception.cmd)
|
|
||||||
description = f"{self._description}\n Command '{cmd}' failed with return code {self.exception.returncode}"
|
|
||||||
for stream_name in ('stdout', 'stderr'):
|
|
||||||
stream = getattr(self.exception, stream_name)
|
|
||||||
if stream:
|
|
||||||
description += f'\n {stream_name}:\n'
|
|
||||||
description += textwrap.indent(stream.decode(), ' ')
|
|
||||||
return description
|
|
||||||
|
|
||||||
|
|
||||||
class TypoWarning(Warning):
|
|
||||||
"""
|
|
||||||
Warning raised when a possible typo is found
|
|
||||||
"""
|
|
||||||
|
|
||||||
|
|
||||||
@contextlib.contextmanager
|
|
||||||
def _working_directory(path: str) -> Iterator[None]:
|
|
||||||
current = os.getcwd()
|
|
||||||
|
|
||||||
os.chdir(path)
|
|
||||||
|
|
||||||
try:
|
|
||||||
yield
|
|
||||||
finally:
|
|
||||||
os.chdir(current)
|
|
||||||
|
|
||||||
|
|
||||||
def _validate_source_directory(srcdir: PathType) -> None:
|
|
||||||
if not os.path.isdir(srcdir):
|
|
||||||
raise BuildException(f'Source {srcdir} is not a directory')
|
|
||||||
pyproject_toml = os.path.join(srcdir, 'pyproject.toml')
|
|
||||||
setup_py = os.path.join(srcdir, 'setup.py')
|
|
||||||
if not os.path.exists(pyproject_toml) and not os.path.exists(setup_py):
|
|
||||||
raise BuildException(f'Source {srcdir} does not appear to be a Python project: no pyproject.toml or setup.py')
|
|
||||||
|
|
||||||
|
|
||||||
def check_dependency(
|
|
||||||
req_string: str, ancestral_req_strings: Tuple[str, ...] = (), parent_extras: AbstractSet[str] = frozenset()
|
|
||||||
) -> Iterator[Tuple[str, ...]]:
|
|
||||||
"""
|
|
||||||
Verify that a dependency and all of its dependencies are met.
|
|
||||||
|
|
||||||
:param req_string: Requirement string
|
|
||||||
:param parent_extras: Extras (eg. "test" in myproject[test])
|
|
||||||
:yields: Unmet dependencies
|
|
||||||
"""
|
|
||||||
import packaging.requirements
|
|
||||||
|
|
||||||
if sys.version_info >= (3, 8):
|
|
||||||
import importlib.metadata as importlib_metadata
|
|
||||||
else:
|
|
||||||
import importlib_metadata
|
|
||||||
|
|
||||||
req = packaging.requirements.Requirement(req_string)
|
|
||||||
normalised_req_string = str(req)
|
|
||||||
|
|
||||||
# ``Requirement`` doesn't implement ``__eq__`` so we cannot compare reqs for
|
|
||||||
# equality directly but the string representation is stable.
|
|
||||||
if normalised_req_string in ancestral_req_strings:
|
|
||||||
# cyclical dependency, already checked.
|
|
||||||
return
|
|
||||||
|
|
||||||
if req.marker:
|
|
||||||
extras = frozenset(('',)).union(parent_extras)
|
|
||||||
# a requirement can have multiple extras but ``evaluate`` can
|
|
||||||
# only check one at a time.
|
|
||||||
if all(not req.marker.evaluate(environment={'extra': e}) for e in extras):
|
|
||||||
# if the marker conditions are not met, we pretend that the
|
|
||||||
# dependency is satisfied.
|
|
||||||
return
|
|
||||||
|
|
||||||
try:
|
|
||||||
dist = importlib_metadata.distribution(req.name) # type: ignore[no-untyped-call]
|
|
||||||
except importlib_metadata.PackageNotFoundError:
|
|
||||||
# dependency is not installed in the environment.
|
|
||||||
yield ancestral_req_strings + (normalised_req_string,)
|
|
||||||
else:
|
|
||||||
if req.specifier and not req.specifier.contains(dist.version, prereleases=True):
|
|
||||||
# the installed version is incompatible.
|
|
||||||
yield ancestral_req_strings + (normalised_req_string,)
|
|
||||||
elif dist.requires:
|
|
||||||
for other_req_string in dist.requires:
|
|
||||||
# yields transitive dependencies that are not satisfied.
|
|
||||||
yield from check_dependency(other_req_string, ancestral_req_strings + (normalised_req_string,), req.extras)
|
|
||||||
|
|
||||||
|
|
||||||
def _find_typo(dictionary: Mapping[str, str], expected: str) -> None:
|
def _find_typo(dictionary: Mapping[str, str], expected: str) -> None:
|
||||||
for obj in dictionary:
|
for obj in dictionary:
|
||||||
if difflib.SequenceMatcher(None, expected, obj).ratio() >= 0.8:
|
if difflib.SequenceMatcher(None, expected, obj).ratio() >= 0.8:
|
||||||
warnings.warn(
|
warnings.warn(
|
||||||
f"Found '{obj}' in pyproject.toml, did you mean '{expected}'?",
|
f"Found '{obj}' in pyproject.toml, did you mean '{expected}'?",
|
||||||
TypoWarning,
|
TypoWarning,
|
||||||
|
stacklevel=2,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
def _parse_build_system_table(pyproject_toml: Mapping[str, Any]) -> Dict[str, Any]:
|
def _validate_source_directory(source_dir: PathType) -> None:
|
||||||
|
if not os.path.isdir(source_dir):
|
||||||
|
msg = f'Source {source_dir} is not a directory'
|
||||||
|
raise BuildException(msg)
|
||||||
|
pyproject_toml = os.path.join(source_dir, 'pyproject.toml')
|
||||||
|
setup_py = os.path.join(source_dir, 'setup.py')
|
||||||
|
if not os.path.exists(pyproject_toml) and not os.path.exists(setup_py):
|
||||||
|
msg = f'Source {source_dir} does not appear to be a Python project: no pyproject.toml or setup.py'
|
||||||
|
raise BuildException(msg)
|
||||||
|
|
||||||
|
|
||||||
|
def _read_pyproject_toml(path: PathType) -> Mapping[str, Any]:
|
||||||
|
try:
|
||||||
|
with open(path, 'rb') as f:
|
||||||
|
return tomllib.loads(f.read().decode())
|
||||||
|
except FileNotFoundError:
|
||||||
|
return {}
|
||||||
|
except PermissionError as e:
|
||||||
|
msg = f"{e.strerror}: '{e.filename}' "
|
||||||
|
raise BuildException(msg) from None
|
||||||
|
except tomllib.TOMLDecodeError as e:
|
||||||
|
msg = f'Failed to parse {path}: {e} '
|
||||||
|
raise BuildException(msg) from None
|
||||||
|
|
||||||
|
|
||||||
|
def _parse_build_system_table(pyproject_toml: Mapping[str, Any]) -> Mapping[str, Any]:
|
||||||
# If pyproject.toml is missing (per PEP 517) or [build-system] is missing
|
# If pyproject.toml is missing (per PEP 517) or [build-system] is missing
|
||||||
# (per PEP 518), use default values
|
# (per PEP 518), use default values
|
||||||
if 'build-system' not in pyproject_toml:
|
if 'build-system' not in pyproject_toml:
|
||||||
|
@ -229,11 +103,13 @@ def _parse_build_system_table(pyproject_toml: Mapping[str, Any]) -> Dict[str, An
|
||||||
# If [build-system] is present, it must have a ``requires`` field (per PEP 518)
|
# If [build-system] is present, it must have a ``requires`` field (per PEP 518)
|
||||||
if 'requires' not in build_system_table:
|
if 'requires' not in build_system_table:
|
||||||
_find_typo(build_system_table, 'requires')
|
_find_typo(build_system_table, 'requires')
|
||||||
raise BuildSystemTableValidationError('`requires` is a required property')
|
msg = '`requires` is a required property'
|
||||||
|
raise BuildSystemTableValidationError(msg)
|
||||||
elif not isinstance(build_system_table['requires'], list) or not all(
|
elif not isinstance(build_system_table['requires'], list) or not all(
|
||||||
isinstance(i, str) for i in build_system_table['requires']
|
isinstance(i, str) for i in build_system_table['requires']
|
||||||
):
|
):
|
||||||
raise BuildSystemTableValidationError('`requires` must be an array of strings')
|
msg = '`requires` must be an array of strings'
|
||||||
|
raise BuildSystemTableValidationError(msg)
|
||||||
|
|
||||||
if 'build-backend' not in build_system_table:
|
if 'build-backend' not in build_system_table:
|
||||||
_find_typo(build_system_table, 'build-backend')
|
_find_typo(build_system_table, 'build-backend')
|
||||||
|
@ -241,21 +117,31 @@ def _parse_build_system_table(pyproject_toml: Mapping[str, Any]) -> Dict[str, An
|
||||||
# but leave ``requires`` intact to emulate pip
|
# but leave ``requires`` intact to emulate pip
|
||||||
build_system_table['build-backend'] = _DEFAULT_BACKEND['build-backend']
|
build_system_table['build-backend'] = _DEFAULT_BACKEND['build-backend']
|
||||||
elif not isinstance(build_system_table['build-backend'], str):
|
elif not isinstance(build_system_table['build-backend'], str):
|
||||||
raise BuildSystemTableValidationError('`build-backend` must be a string')
|
msg = '`build-backend` must be a string'
|
||||||
|
raise BuildSystemTableValidationError(msg)
|
||||||
|
|
||||||
if 'backend-path' in build_system_table and (
|
if 'backend-path' in build_system_table and (
|
||||||
not isinstance(build_system_table['backend-path'], list)
|
not isinstance(build_system_table['backend-path'], list)
|
||||||
or not all(isinstance(i, str) for i in build_system_table['backend-path'])
|
or not all(isinstance(i, str) for i in build_system_table['backend-path'])
|
||||||
):
|
):
|
||||||
raise BuildSystemTableValidationError('`backend-path` must be an array of strings')
|
msg = '`backend-path` must be an array of strings'
|
||||||
|
raise BuildSystemTableValidationError(msg)
|
||||||
|
|
||||||
unknown_props = build_system_table.keys() - {'requires', 'build-backend', 'backend-path'}
|
unknown_props = build_system_table.keys() - {'requires', 'build-backend', 'backend-path'}
|
||||||
if unknown_props:
|
if unknown_props:
|
||||||
raise BuildSystemTableValidationError(f'Unknown properties: {", ".join(unknown_props)}')
|
msg = f'Unknown properties: {", ".join(unknown_props)}'
|
||||||
|
raise BuildSystemTableValidationError(msg)
|
||||||
|
|
||||||
return build_system_table
|
return build_system_table
|
||||||
|
|
||||||
|
|
||||||
|
def _wrap_subprocess_runner(runner: RunnerType, env: env.IsolatedEnv) -> RunnerType:
|
||||||
|
def _invoke_wrapped_runner(cmd: Sequence[str], cwd: str | None, extra_environ: Mapping[str, str] | None) -> None:
|
||||||
|
runner(cmd, cwd, {**(env.make_extra_environ() or {}), **(extra_environ or {})})
|
||||||
|
|
||||||
|
return _invoke_wrapped_runner
|
||||||
|
|
||||||
|
|
||||||
class ProjectBuilder:
|
class ProjectBuilder:
|
||||||
"""
|
"""
|
||||||
The PEP 517 consumer API.
|
The PEP 517 consumer API.
|
||||||
|
@ -263,100 +149,73 @@ class ProjectBuilder:
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
self,
|
self,
|
||||||
srcdir: PathType,
|
source_dir: PathType,
|
||||||
python_executable: str = sys.executable,
|
python_executable: str = sys.executable,
|
||||||
scripts_dir: Optional[str] = None,
|
runner: RunnerType = pyproject_hooks.default_subprocess_runner,
|
||||||
runner: RunnerType = pep517.wrappers.default_subprocess_runner,
|
|
||||||
) -> None:
|
) -> None:
|
||||||
"""
|
"""
|
||||||
:param srcdir: The source directory
|
:param source_dir: The source directory
|
||||||
:param scripts_dir: The location of the scripts dir (defaults to the folder where the python executable lives)
|
|
||||||
:param python_executable: The python executable where the backend lives
|
:param python_executable: The python executable where the backend lives
|
||||||
:param runner: An alternative runner for backend subprocesses
|
:param runner: Runner for backend subprocesses
|
||||||
|
|
||||||
The 'runner', if provided, must accept the following arguments:
|
The ``runner``, if provided, must accept the following arguments:
|
||||||
|
|
||||||
- cmd: a list of strings representing the command and arguments to
|
- ``cmd``: a list of strings representing the command and arguments to
|
||||||
execute, as would be passed to e.g. 'subprocess.check_call'.
|
execute, as would be passed to e.g. 'subprocess.check_call'.
|
||||||
- cwd: a string representing the working directory that must be
|
- ``cwd``: a string representing the working directory that must be
|
||||||
used for the subprocess. Corresponds to the provided srcdir.
|
used for the subprocess. Corresponds to the provided source_dir.
|
||||||
- extra_environ: a dict mapping environment variable names to values
|
- ``extra_environ``: a dict mapping environment variable names to values
|
||||||
which must be set for the subprocess execution.
|
which must be set for the subprocess execution.
|
||||||
|
|
||||||
The default runner simply calls the backend hooks in a subprocess, writing backend output
|
The default runner simply calls the backend hooks in a subprocess, writing backend output
|
||||||
to stdout/stderr.
|
to stdout/stderr.
|
||||||
"""
|
"""
|
||||||
self._srcdir: str = os.path.abspath(srcdir)
|
self._source_dir: str = os.path.abspath(source_dir)
|
||||||
_validate_source_directory(srcdir)
|
_validate_source_directory(source_dir)
|
||||||
|
|
||||||
spec_file = os.path.join(srcdir, 'pyproject.toml')
|
self._python_executable = python_executable
|
||||||
|
self._runner = runner
|
||||||
|
|
||||||
try:
|
pyproject_toml_path = os.path.join(source_dir, 'pyproject.toml')
|
||||||
with open(spec_file, 'rb') as f:
|
self._build_system = _parse_build_system_table(_read_pyproject_toml(pyproject_toml_path))
|
||||||
spec = toml_loads(f.read().decode())
|
|
||||||
except FileNotFoundError:
|
|
||||||
spec = {}
|
|
||||||
except PermissionError as e:
|
|
||||||
raise BuildException(f"{e.strerror}: '{e.filename}' ") # noqa: B904 # use raise from
|
|
||||||
except TOMLDecodeError as e:
|
|
||||||
raise BuildException(f'Failed to parse {spec_file}: {e} ') # noqa: B904 # use raise from
|
|
||||||
|
|
||||||
self._build_system = _parse_build_system_table(spec)
|
|
||||||
self._backend = self._build_system['build-backend']
|
self._backend = self._build_system['build-backend']
|
||||||
self._scripts_dir = scripts_dir
|
|
||||||
self._hook_runner = runner
|
self._hook = pyproject_hooks.BuildBackendHookCaller(
|
||||||
self._hook = pep517.wrappers.Pep517HookCaller(
|
self._source_dir,
|
||||||
self.srcdir,
|
|
||||||
self._backend,
|
self._backend,
|
||||||
backend_path=self._build_system.get('backend-path'),
|
backend_path=self._build_system.get('backend-path'),
|
||||||
python_executable=python_executable,
|
python_executable=self._python_executable,
|
||||||
runner=self._runner,
|
runner=self._runner,
|
||||||
)
|
)
|
||||||
|
|
||||||
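The runner contract spelled out in the docstring above (``cmd``, ``cwd``, ``extra_environ``) can be satisfied by a small callable; a hypothetical quiet runner, not part of the package, might look like this (``'.'`` is a placeholder source directory):

    import os
    import subprocess

    from build import ProjectBuilder

    def quiet_runner(cmd, cwd=None, extra_environ=None):
        env = {**os.environ, **(extra_environ or {})}
        subprocess.run(cmd, cwd=cwd, env=env, check=True, stdout=subprocess.DEVNULL)

    builder = ProjectBuilder('.', runner=quiet_runner)
    print(builder.build_system_requires)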
def _runner(
|
@classmethod
|
||||||
self, cmd: Sequence[str], cwd: Optional[str] = None, extra_environ: Optional[Mapping[str, str]] = None
|
def from_isolated_env(
|
||||||
) -> None:
|
cls: type[_TProjectBuilder],
|
||||||
# if script dir is specified must be inserted at the start of PATH (avoid duplicate path while doing so)
|
env: env.IsolatedEnv,
|
||||||
if self.scripts_dir is not None:
|
source_dir: PathType,
|
||||||
paths: Dict[str, None] = OrderedDict()
|
runner: RunnerType = pyproject_hooks.default_subprocess_runner,
|
||||||
paths[str(self.scripts_dir)] = None
|
) -> _TProjectBuilder:
|
||||||
if 'PATH' in os.environ:
|
return cls(
|
||||||
paths.update((i, None) for i in os.environ['PATH'].split(os.pathsep))
|
source_dir=source_dir,
|
||||||
extra_environ = {} if extra_environ is None else dict(extra_environ)
|
python_executable=env.python_executable,
|
||||||
extra_environ['PATH'] = os.pathsep.join(paths)
|
runner=_wrap_subprocess_runner(runner, env),
|
||||||
self._hook_runner(cmd, cwd, extra_environ)
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def srcdir(self) -> str:
|
def source_dir(self) -> str:
|
||||||
"""Project source directory."""
|
"""Project source directory."""
|
||||||
return self._srcdir
|
return self._source_dir
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def python_executable(self) -> str:
|
def python_executable(self) -> str:
|
||||||
"""
|
"""
|
||||||
The Python executable used to invoke the backend.
|
The Python executable used to invoke the backend.
|
||||||
"""
|
"""
|
||||||
# make mypy happy
|
return self._python_executable
|
||||||
exe: str = self._hook.python_executable
|
|
||||||
return exe
|
|
||||||
|
|
||||||
@python_executable.setter
|
|
||||||
def python_executable(self, value: str) -> None:
|
|
||||||
self._hook.python_executable = value
|
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def scripts_dir(self) -> Optional[str]:
|
def build_system_requires(self) -> set[str]:
|
||||||
"""
|
|
||||||
The folder where the scripts are stored for the python executable.
|
|
||||||
"""
|
|
||||||
return self._scripts_dir
|
|
||||||
|
|
||||||
@scripts_dir.setter
|
|
||||||
def scripts_dir(self, value: Optional[str]) -> None:
|
|
||||||
self._scripts_dir = value
|
|
||||||
|
|
||||||
@property
|
|
||||||
def build_system_requires(self) -> Set[str]:
|
|
||||||
"""
|
"""
|
||||||
The dependencies defined in the ``pyproject.toml``'s
|
The dependencies defined in the ``pyproject.toml``'s
|
||||||
``build-system.requires`` field or the default build dependencies
|
``build-system.requires`` field or the default build dependencies
|
||||||
|
@ -364,7 +223,7 @@ class ProjectBuilder:
|
||||||
"""
|
"""
|
||||||
return set(self._build_system['requires'])
|
return set(self._build_system['requires'])
|
||||||
|
|
||||||
def get_requires_for_build(self, distribution: str, config_settings: Optional[ConfigSettingsType] = None) -> Set[str]:
|
def get_requires_for_build(self, distribution: str, config_settings: ConfigSettingsType | None = None) -> set[str]:
|
||||||
"""
|
"""
|
||||||
Return the dependencies defined by the backend in addition to
|
Return the dependencies defined by the backend in addition to
|
||||||
:attr:`build_system_requires` for a given distribution.
|
:attr:`build_system_requires` for a given distribution.
|
||||||
|
@ -380,9 +239,7 @@ class ProjectBuilder:
|
||||||
with self._handle_backend(hook_name):
|
with self._handle_backend(hook_name):
|
||||||
return set(get_requires(config_settings))
|
return set(get_requires(config_settings))
|
||||||
|
|
||||||
def check_dependencies(
|
def check_dependencies(self, distribution: str, config_settings: ConfigSettingsType | None = None) -> set[tuple[str, ...]]:
|
||||||
self, distribution: str, config_settings: Optional[ConfigSettingsType] = None
|
|
||||||
) -> Set[Tuple[str, ...]]:
|
|
||||||
"""
|
"""
|
||||||
Return the dependencies which are not satisfied from the combined set of
|
Return the dependencies which are not satisfied from the combined set of
|
||||||
:attr:`build_system_requires` and :meth:`get_requires_for_build` for a given
|
:attr:`build_system_requires` and :meth:`get_requires_for_build` for a given
|
||||||
|
@ -396,8 +253,8 @@ class ProjectBuilder:
|
||||||
return {u for d in dependencies for u in check_dependency(d)}
|
return {u for d in dependencies for u in check_dependency(d)}
|
||||||
|
|
||||||
def prepare(
|
def prepare(
|
||||||
self, distribution: str, output_directory: PathType, config_settings: Optional[ConfigSettingsType] = None
|
self, distribution: str, output_directory: PathType, config_settings: ConfigSettingsType | None = None
|
||||||
) -> Optional[str]:
|
) -> str | None:
|
||||||
"""
|
"""
|
||||||
Prepare metadata for a distribution.
|
Prepare metadata for a distribution.
|
||||||
|
|
||||||
|
@ -415,7 +272,7 @@ class ProjectBuilder:
|
||||||
_allow_fallback=False,
|
_allow_fallback=False,
|
||||||
)
|
)
|
||||||
except BuildBackendException as exception:
|
except BuildBackendException as exception:
|
||||||
if isinstance(exception.exception, pep517.wrappers.HookMissing):
|
if isinstance(exception.exception, pyproject_hooks.HookMissing):
|
||||||
return None
|
return None
|
||||||
raise
|
raise
|
||||||
|
|
||||||
|
@ -423,8 +280,8 @@ class ProjectBuilder:
|
||||||
self,
|
self,
|
||||||
distribution: str,
|
distribution: str,
|
||||||
output_directory: PathType,
|
output_directory: PathType,
|
||||||
config_settings: Optional[ConfigSettingsType] = None,
|
config_settings: ConfigSettingsType | None = None,
|
||||||
metadata_directory: Optional[str] = None,
|
metadata_directory: str | None = None,
|
||||||
) -> str:
|
) -> str:
|
||||||
"""
|
"""
|
||||||
Build a distribution.
|
Build a distribution.
|
||||||
|
@ -457,9 +314,10 @@ class ProjectBuilder:
|
||||||
|
|
||||||
# fallback to build_wheel hook
|
# fallback to build_wheel hook
|
||||||
wheel = self.build('wheel', output_directory)
|
wheel = self.build('wheel', output_directory)
|
||||||
match = _WHEEL_NAME_REGEX.match(os.path.basename(wheel))
|
match = parse_wheel_filename(os.path.basename(wheel))
|
||||||
if not match:
|
if not match:
|
||||||
raise ValueError('Invalid wheel')
|
msg = 'Invalid wheel'
|
||||||
|
raise ValueError(msg)
|
||||||
distinfo = f"{match['distribution']}-{match['version']}.dist-info"
|
distinfo = f"{match['distribution']}-{match['version']}.dist-info"
|
||||||
member_prefix = f'{distinfo}/'
|
member_prefix = f'{distinfo}/'
|
||||||
with zipfile.ZipFile(wheel) as w:
|
with zipfile.ZipFile(wheel) as w:
|
||||||
|
@ -470,7 +328,7 @@ class ProjectBuilder:
|
||||||
return os.path.join(output_directory, distinfo)
|
return os.path.join(output_directory, distinfo)
|
||||||
|
|
||||||
def _call_backend(
|
def _call_backend(
|
||||||
self, hook_name: str, outdir: PathType, config_settings: Optional[ConfigSettingsType] = None, **kwargs: Any
|
self, hook_name: str, outdir: PathType, config_settings: ConfigSettingsType | None = None, **kwargs: Any
|
||||||
) -> str:
|
) -> str:
|
||||||
outdir = os.path.abspath(outdir)
|
outdir = os.path.abspath(outdir)
|
||||||
|
|
||||||
|
@ -478,7 +336,8 @@ class ProjectBuilder:
|
||||||
|
|
||||||
if os.path.exists(outdir):
|
if os.path.exists(outdir):
|
||||||
if not os.path.isdir(outdir):
|
if not os.path.isdir(outdir):
|
||||||
raise BuildException(f"Build path '{outdir}' exists and is not a directory")
|
msg = f"Build path '{outdir}' exists and is not a directory"
|
||||||
|
raise BuildException(msg)
|
||||||
else:
|
else:
|
||||||
os.makedirs(outdir)
|
os.makedirs(outdir)
|
||||||
|
|
||||||
|
@ -489,21 +348,18 @@ class ProjectBuilder:
|
||||||
|
|
||||||
@contextlib.contextmanager
|
@contextlib.contextmanager
|
||||||
def _handle_backend(self, hook: str) -> Iterator[None]:
|
def _handle_backend(self, hook: str) -> Iterator[None]:
|
||||||
with _working_directory(self.srcdir):
|
try:
|
||||||
try:
|
yield
|
||||||
yield
|
except pyproject_hooks.BackendUnavailable as exception:
|
||||||
except pep517.wrappers.BackendUnavailable as exception:
|
raise BuildBackendException(
|
||||||
raise BuildBackendException( # noqa: B904 # use raise from
|
exception,
|
||||||
exception,
|
f"Backend '{self._backend}' is not available.",
|
||||||
f"Backend '{self._backend}' is not available.",
|
sys.exc_info(),
|
||||||
sys.exc_info(),
|
) from None
|
||||||
)
|
except subprocess.CalledProcessError as exception:
|
||||||
except subprocess.CalledProcessError as exception:
|
raise BuildBackendException(exception, f'Backend subprocess exited when trying to invoke {hook}') from None
|
||||||
raise BuildBackendException( # noqa: B904 # use raise from
|
except Exception as exception:
|
||||||
exception, f'Backend subprocess exited when trying to invoke {hook}'
|
raise BuildBackendException(exception, exc_info=sys.exc_info()) from None
|
||||||
)
|
|
||||||
except Exception as exception:
|
|
||||||
raise BuildBackendException(exception, exc_info=sys.exc_info()) # noqa: B904 # use raise from
|
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def log(message: str) -> None:
|
def log(message: str) -> None:
|
||||||
|
@ -535,5 +391,5 @@ __all__ = [
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
def __dir__() -> List[str]:
|
def __dir__() -> list[str]:
|
||||||
return __all__
|
return __all__
|
||||||
|
|
|
@ -1,5 +1,6 @@
|
||||||
# SPDX-License-Identifier: MIT
|
# SPDX-License-Identifier: MIT
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import argparse
|
import argparse
|
||||||
import contextlib
|
import contextlib
|
||||||
|
@ -8,18 +9,20 @@ import platform
|
||||||
import shutil
|
import shutil
|
||||||
import subprocess
|
import subprocess
|
||||||
import sys
|
import sys
|
||||||
import tarfile
|
|
||||||
import tempfile
|
import tempfile
|
||||||
import textwrap
|
import textwrap
|
||||||
import traceback
|
import traceback
|
||||||
import warnings
|
import warnings
|
||||||
|
|
||||||
from typing import Dict, Iterator, List, NoReturn, Optional, Sequence, TextIO, Type, Union
|
from collections.abc import Iterator, Sequence
|
||||||
|
from functools import partial
|
||||||
|
from typing import NoReturn, TextIO
|
||||||
|
|
||||||
import build
|
import build
|
||||||
|
|
||||||
from build import BuildBackendException, BuildException, ConfigSettingsType, FailedProcessError, PathType, ProjectBuilder
|
from . import ConfigSettingsType, PathType, ProjectBuilder
|
||||||
from build.env import IsolatedEnvBuilder
|
from ._exceptions import BuildBackendException, BuildException, FailedProcessError
|
||||||
|
from .env import DefaultIsolatedEnv
|
||||||
|
|
||||||
|
|
||||||
_COLORS = {
|
_COLORS = {
|
||||||
|
@ -34,10 +37,10 @@ _COLORS = {
|
||||||
_NO_COLORS = {color: '' for color in _COLORS}
|
_NO_COLORS = {color: '' for color in _COLORS}
|
||||||
|
|
||||||
|
|
||||||
def _init_colors() -> Dict[str, str]:
|
def _init_colors() -> dict[str, str]:
|
||||||
if 'NO_COLOR' in os.environ:
|
if 'NO_COLOR' in os.environ:
|
||||||
if 'FORCE_COLOR' in os.environ:
|
if 'FORCE_COLOR' in os.environ:
|
||||||
warnings.warn('Both NO_COLOR and FORCE_COLOR environment variables are set, disabling color')
|
warnings.warn('Both NO_COLOR and FORCE_COLOR environment variables are set, disabling color', stacklevel=2)
|
||||||
return _NO_COLORS
|
return _NO_COLORS
|
||||||
elif 'FORCE_COLOR' in os.environ or sys.stdout.isatty():
|
elif 'FORCE_COLOR' in os.environ or sys.stdout.isatty():
|
||||||
return _COLORS
|
return _COLORS
|
||||||
|
@ -52,12 +55,12 @@ def _cprint(fmt: str = '', msg: str = '') -> None:
|
||||||
|
|
||||||
|
|
||||||
def _showwarning(
|
def _showwarning(
|
||||||
message: Union[Warning, str],
|
message: Warning | str,
|
||||||
category: Type[Warning],
|
category: type[Warning],
|
||||||
filename: str,
|
filename: str,
|
||||||
lineno: int,
|
lineno: int,
|
||||||
file: Optional[TextIO] = None,
|
file: TextIO | None = None,
|
||||||
line: Optional[str] = None,
|
line: str | None = None,
|
||||||
) -> None: # pragma: no cover
|
) -> None: # pragma: no cover
|
||||||
_cprint('{yellow}WARNING{reset} {}', str(message))
|
_cprint('{yellow}WARNING{reset} {}', str(message))
|
||||||
|
|
||||||
|
@ -91,7 +94,7 @@ class _ProjectBuilder(ProjectBuilder):
|
||||||
_cprint('{bold}* {}{reset}', message)
|
_cprint('{bold}* {}{reset}', message)
|
||||||
|
|
||||||
|
|
||||||
class _IsolatedEnvBuilder(IsolatedEnvBuilder):
|
class _DefaultIsolatedEnv(DefaultIsolatedEnv):
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def log(message: str) -> None:
|
def log(message: str) -> None:
|
||||||
_cprint('{bold}* {}{reset}', message)
|
_cprint('{bold}* {}{reset}', message)
|
||||||
|
@ -102,27 +105,28 @@ def _format_dep_chain(dep_chain: Sequence[str]) -> str:
|
||||||
|
|
||||||
|
|
||||||
def _build_in_isolated_env(
|
def _build_in_isolated_env(
|
||||||
builder: ProjectBuilder, outdir: PathType, distribution: str, config_settings: Optional[ConfigSettingsType]
|
srcdir: PathType, outdir: PathType, distribution: str, config_settings: ConfigSettingsType | None
|
||||||
) -> str:
|
) -> str:
|
||||||
with _IsolatedEnvBuilder() as env:
|
with _DefaultIsolatedEnv() as env:
|
||||||
builder.python_executable = env.executable
|
builder = _ProjectBuilder.from_isolated_env(env, srcdir)
|
||||||
builder.scripts_dir = env.scripts_dir
|
|
||||||
# first install the build dependencies
|
# first install the build dependencies
|
||||||
env.install(builder.build_system_requires)
|
env.install(builder.build_system_requires)
|
||||||
# then get the extra required dependencies from the backend (which was installed in the call above :P)
|
# then get the extra required dependencies from the backend (which was installed in the call above :P)
|
||||||
env.install(builder.get_requires_for_build(distribution))
|
env.install(builder.get_requires_for_build(distribution, config_settings or {}))
|
||||||
return builder.build(distribution, outdir, config_settings or {})
|
return builder.build(distribution, outdir, config_settings or {})
|
||||||
|
|
||||||
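The same two-step install shown above (``build-system.requires`` first, then the backend's extra requires) is available to library users through the public API; a sketch, assuming the current directory is a Python project:

    from build import ProjectBuilder
    from build.env import DefaultIsolatedEnv

    with DefaultIsolatedEnv() as env:
        builder = ProjectBuilder.from_isolated_env(env, '.')
        env.install(builder.build_system_requires)            # build-system.requires
        env.install(builder.get_requires_for_build('wheel'))  # backend's extra requires
        wheel = builder.build('wheel', 'dist')
    print(wheel)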
|
|
||||||
def _build_in_current_env(
|
def _build_in_current_env(
|
||||||
builder: ProjectBuilder,
|
srcdir: PathType,
|
||||||
outdir: PathType,
|
outdir: PathType,
|
||||||
distribution: str,
|
distribution: str,
|
||||||
config_settings: Optional[ConfigSettingsType],
|
config_settings: ConfigSettingsType | None,
|
||||||
skip_dependency_check: bool = False,
|
skip_dependency_check: bool = False,
|
||||||
) -> str:
|
) -> str:
|
||||||
|
builder = _ProjectBuilder(srcdir)
|
||||||
|
|
||||||
if not skip_dependency_check:
|
if not skip_dependency_check:
|
||||||
missing = builder.check_dependencies(distribution)
|
missing = builder.check_dependencies(distribution, config_settings or {})
|
||||||
if missing:
|
if missing:
|
||||||
dependencies = ''.join('\n\t' + dep for deps in missing for dep in (deps[0], _format_dep_chain(deps[1:])) if dep)
|
dependencies = ''.join('\n\t' + dep for deps in missing for dep in (deps[0], _format_dep_chain(deps[1:])) if dep)
|
||||||
_cprint()
|
_cprint()
|
||||||
|
@ -133,16 +137,16 @@ def _build_in_current_env(
|
||||||
|
|
||||||
def _build(
|
def _build(
|
||||||
isolation: bool,
|
isolation: bool,
|
||||||
builder: ProjectBuilder,
|
srcdir: PathType,
|
||||||
outdir: PathType,
|
outdir: PathType,
|
||||||
distribution: str,
|
distribution: str,
|
||||||
config_settings: Optional[ConfigSettingsType],
|
config_settings: ConfigSettingsType | None,
|
||||||
skip_dependency_check: bool,
|
skip_dependency_check: bool,
|
||||||
) -> str:
|
) -> str:
|
||||||
if isolation:
|
if isolation:
|
||||||
return _build_in_isolated_env(builder, outdir, distribution, config_settings)
|
return _build_in_isolated_env(srcdir, outdir, distribution, config_settings)
|
||||||
else:
|
else:
|
||||||
return _build_in_current_env(builder, outdir, distribution, config_settings, skip_dependency_check)
|
return _build_in_current_env(srcdir, outdir, distribution, config_settings, skip_dependency_check)
|
||||||
|
|
||||||
|
|
||||||
@contextlib.contextmanager
|
@contextlib.contextmanager
|
||||||
|
@ -172,7 +176,8 @@ def _handle_build_error() -> Iterator[None]:
|
||||||
|
|
||||||
def _natural_language_list(elements: Sequence[str]) -> str:
|
def _natural_language_list(elements: Sequence[str]) -> str:
|
||||||
if len(elements) == 0:
|
if len(elements) == 0:
|
||||||
raise IndexError('no elements')
|
msg = 'no elements'
|
||||||
|
raise IndexError(msg)
|
||||||
elif len(elements) == 1:
|
elif len(elements) == 1:
|
||||||
return elements[0]
|
return elements[0]
|
||||||
else:
|
else:
|
||||||
|
@ -186,7 +191,7 @@ def build_package(
|
||||||
srcdir: PathType,
|
srcdir: PathType,
|
||||||
outdir: PathType,
|
outdir: PathType,
|
||||||
distributions: Sequence[str],
|
distributions: Sequence[str],
|
||||||
config_settings: Optional[ConfigSettingsType] = None,
|
config_settings: ConfigSettingsType | None = None,
|
||||||
isolation: bool = True,
|
isolation: bool = True,
|
||||||
skip_dependency_check: bool = False,
|
skip_dependency_check: bool = False,
|
||||||
) -> Sequence[str]:
|
) -> Sequence[str]:
|
||||||
|
@ -200,10 +205,9 @@ def build_package(
|
||||||
:param isolation: Isolate the build in a separate environment
|
:param isolation: Isolate the build in a separate environment
|
||||||
:param skip_dependency_check: Do not perform the dependency check
|
:param skip_dependency_check: Do not perform the dependency check
|
||||||
"""
|
"""
|
||||||
built: List[str] = []
|
built: list[str] = []
|
||||||
builder = _ProjectBuilder(srcdir)
|
|
||||||
for distribution in distributions:
|
for distribution in distributions:
|
||||||
out = _build(isolation, builder, outdir, distribution, config_settings, skip_dependency_check)
|
out = _build(isolation, srcdir, outdir, distribution, config_settings, skip_dependency_check)
|
||||||
built.append(os.path.basename(out))
|
built.append(os.path.basename(out))
|
||||||
return built
|
return built
|
||||||
|
|
||||||
|
@ -212,7 +216,7 @@ def build_package_via_sdist(
|
||||||
srcdir: PathType,
|
srcdir: PathType,
|
||||||
outdir: PathType,
|
outdir: PathType,
|
||||||
distributions: Sequence[str],
|
distributions: Sequence[str],
|
||||||
config_settings: Optional[ConfigSettingsType] = None,
|
config_settings: ConfigSettingsType | None = None,
|
||||||
isolation: bool = True,
|
isolation: bool = True,
|
||||||
skip_dependency_check: bool = False,
|
skip_dependency_check: bool = False,
|
||||||
) -> Sequence[str]:
|
) -> Sequence[str]:
|
||||||
|
@ -226,28 +230,30 @@ def build_package_via_sdist(
|
||||||
:param isolation: Isolate the build in a separate environment
|
:param isolation: Isolate the build in a separate environment
|
||||||
:param skip_dependency_check: Do not perform the dependency check
|
:param skip_dependency_check: Do not perform the dependency check
|
||||||
"""
|
"""
|
||||||
if 'sdist' in distributions:
|
from ._util import TarFile
|
||||||
raise ValueError('Only binary distributions are allowed but sdist was specified')
|
|
||||||
|
|
||||||
builder = _ProjectBuilder(srcdir)
|
if 'sdist' in distributions:
|
||||||
sdist = _build(isolation, builder, outdir, 'sdist', config_settings, skip_dependency_check)
|
msg = 'Only binary distributions are allowed but sdist was specified'
|
||||||
|
raise ValueError(msg)
|
||||||
|
|
||||||
|
sdist = _build(isolation, srcdir, outdir, 'sdist', config_settings, skip_dependency_check)
|
||||||
|
|
||||||
sdist_name = os.path.basename(sdist)
|
sdist_name = os.path.basename(sdist)
|
||||||
sdist_out = tempfile.mkdtemp(prefix='build-via-sdist-')
|
sdist_out = tempfile.mkdtemp(prefix='build-via-sdist-')
|
||||||
built: List[str] = []
|
built: list[str] = []
|
||||||
# extract sdist
|
if distributions:
|
||||||
with tarfile.open(sdist) as t:
|
# extract sdist
|
||||||
t.extractall(sdist_out)
|
with TarFile.open(sdist) as t:
|
||||||
try:
|
t.extractall(sdist_out)
|
||||||
builder = _ProjectBuilder(os.path.join(sdist_out, sdist_name[: -len('.tar.gz')]))
|
try:
|
||||||
if distributions:
|
_ProjectBuilder.log(f'Building {_natural_language_list(distributions)} from sdist')
|
||||||
builder.log(f'Building {_natural_language_list(distributions)} from sdist')
|
srcdir = os.path.join(sdist_out, sdist_name[: -len('.tar.gz')])
|
||||||
for distribution in distributions:
|
for distribution in distributions:
|
||||||
out = _build(isolation, builder, outdir, distribution, config_settings, skip_dependency_check)
|
out = _build(isolation, srcdir, outdir, distribution, config_settings, skip_dependency_check)
|
||||||
built.append(os.path.basename(out))
|
built.append(os.path.basename(out))
|
||||||
finally:
|
finally:
|
||||||
shutil.rmtree(sdist_out, ignore_errors=True)
|
shutil.rmtree(sdist_out, ignore_errors=True)
|
||||||
return [sdist_name] + built
|
return [sdist_name, *built]
|
||||||
|
|
||||||
|
|
||||||
def main_parser() -> argparse.ArgumentParser:
|
def main_parser() -> argparse.ArgumentParser:
|
||||||
|
@ -258,7 +264,7 @@ def main_parser() -> argparse.ArgumentParser:
|
||||||
description=textwrap.indent(
|
description=textwrap.indent(
|
||||||
textwrap.dedent(
|
textwrap.dedent(
|
||||||
'''
|
'''
|
||||||
A simple, correct PEP 517 build frontend.
|
A simple, correct Python build frontend.
|
||||||
|
|
||||||
By default, a source distribution (sdist) is built from {srcdir}
|
By default, a source distribution (sdist) is built from {srcdir}
|
||||||
and a binary distribution (wheel) is built from the sdist.
|
and a binary distribution (wheel) is built from the sdist.
|
||||||
|
@ -273,7 +279,12 @@ def main_parser() -> argparse.ArgumentParser:
|
||||||
).strip(),
|
).strip(),
|
||||||
' ',
|
' ',
|
||||||
),
|
),
|
||||||
formatter_class=argparse.RawTextHelpFormatter,
|
formatter_class=partial(
|
||||||
|
argparse.RawDescriptionHelpFormatter,
|
||||||
|
# Prevent argparse from taking up the entire width of the terminal window
|
||||||
|
# which impedes readability.
|
||||||
|
width=min(shutil.get_terminal_size().columns - 2, 127),
|
||||||
|
),
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'srcdir',
|
'srcdir',
|
||||||
|
@ -305,6 +316,7 @@ def main_parser() -> argparse.ArgumentParser:
|
||||||
'-o',
|
'-o',
|
||||||
type=str,
|
type=str,
|
||||||
help=f'output directory (defaults to {{srcdir}}{os.sep}dist)',
|
help=f'output directory (defaults to {{srcdir}}{os.sep}dist)',
|
||||||
|
metavar='PATH',
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'--skip-dependency-check',
|
'--skip-dependency-check',
|
||||||
|
@ -316,19 +328,22 @@ def main_parser() -> argparse.ArgumentParser:
|
||||||
'--no-isolation',
|
'--no-isolation',
|
||||||
'-n',
|
'-n',
|
||||||
action='store_true',
|
action='store_true',
|
||||||
help='do not isolate the build in a virtual environment',
|
help='disable building the project in an isolated virtual environment. '
|
||||||
|
'Build dependencies must be installed separately when this option is used',
|
||||||
)
|
)
|
||||||
parser.add_argument(
|
parser.add_argument(
|
||||||
'--config-setting',
|
'--config-setting',
|
||||||
'-C',
|
'-C',
|
||||||
action='append',
|
action='append',
|
||||||
help='pass options to the backend. options which begin with a hyphen must be in the form of '
|
help='settings to pass to the backend. Multiple settings can be provided. '
|
||||||
'"--config-setting=--opt(=value)" or "-C--opt(=value)"',
|
'Settings beginning with a hyphen will erroneously be interpreted as options to build if separated '
|
||||||
|
'by a space character; use ``--config-setting=--my-setting -C--my-other-setting``',
|
||||||
|
metavar='KEY[=VALUE]',
|
||||||
)
|
)
|
||||||
return parser
|
return parser
|
||||||
|
|
||||||
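Programmatically, the default CLI behaviour described in the parser help (build an sdist from the source tree, then a wheel from that sdist) corresponds roughly to this sketch; the paths are placeholders:

    from build.__main__ import build_package_via_sdist

    artifacts = build_package_via_sdist('.', 'dist', ['wheel'])
    print(artifacts)  # e.g. ['proj-1.0.0.tar.gz', 'proj-1.0.0-py3-none-any.whl']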
|
|
||||||
def main(cli_args: Sequence[str], prog: Optional[str] = None) -> None: # noqa: C901
|
def main(cli_args: Sequence[str], prog: str | None = None) -> None:
|
||||||
"""
|
"""
|
||||||
Parse the CLI arguments and invoke the build process.
|
Parse the CLI arguments and invoke the build process.
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,70 @@
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import subprocess
|
||||||
|
import textwrap
|
||||||
|
import types
|
||||||
|
|
||||||
|
|
||||||
|
class BuildException(Exception):
|
||||||
|
"""
|
||||||
|
Exception raised by :class:`build.ProjectBuilder`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class BuildBackendException(Exception):
|
||||||
|
"""
|
||||||
|
Exception raised when a backend operation fails.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
exception: Exception,
|
||||||
|
description: str | None = None,
|
||||||
|
exc_info: tuple[type[BaseException], BaseException, types.TracebackType]
|
||||||
|
| tuple[None, None, None] = (None, None, None),
|
||||||
|
) -> None:
|
||||||
|
super().__init__()
|
||||||
|
self.exception = exception
|
||||||
|
self.exc_info = exc_info
|
||||||
|
self._description = description
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
if self._description:
|
||||||
|
return self._description
|
||||||
|
return f'Backend operation failed: {self.exception!r}'
|
||||||
|
|
||||||
|
|
||||||
|
class BuildSystemTableValidationError(BuildException):
|
||||||
|
"""
|
||||||
|
Exception raised when the ``[build-system]`` table in pyproject.toml is invalid.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
return f'Failed to validate `build-system` in pyproject.toml: {self.args[0]}'
|
||||||
|
|
||||||
|
|
||||||
|
class FailedProcessError(Exception):
|
||||||
|
"""
|
||||||
|
Exception raised when a setup or preparation operation fails.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, exception: subprocess.CalledProcessError, description: str) -> None:
|
||||||
|
super().__init__()
|
||||||
|
self.exception = exception
|
||||||
|
self._description = description
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
cmd = ' '.join(self.exception.cmd)
|
||||||
|
description = f"{self._description}\n Command '{cmd}' failed with return code {self.exception.returncode}"
|
||||||
|
for stream_name in ('stdout', 'stderr'):
|
||||||
|
stream = getattr(self.exception, stream_name)
|
||||||
|
if stream:
|
||||||
|
description += f'\n {stream_name}:\n'
|
||||||
|
description += textwrap.indent(stream.decode(), ' ')
|
||||||
|
return description
|
||||||
|
|
||||||
|
|
||||||
|
class TypoWarning(Warning):
|
||||||
|
"""
|
||||||
|
Warning raised when a possible typo is found.
|
||||||
|
"""
|
|
@ -0,0 +1,14 @@
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
|
if sys.version_info < (3, 8):
|
||||||
|
import importlib_metadata as metadata
|
||||||
|
elif sys.version_info < (3, 9, 10) or (3, 10, 0) <= sys.version_info < (3, 10, 2):
|
||||||
|
try:
|
||||||
|
import importlib_metadata as metadata
|
||||||
|
except ModuleNotFoundError:
|
||||||
|
from importlib import metadata
|
||||||
|
else:
|
||||||
|
from importlib import metadata
|
||||||
|
|
||||||
|
__all__ = ['metadata']
|
|
@@ -0,0 +1,88 @@
from __future__ import annotations

import re
import sys
import tarfile
import typing

from collections.abc import Iterator, Set


_WHEEL_FILENAME_REGEX = re.compile(
    r'(?P<distribution>.+)-(?P<version>.+)'
    r'(-(?P<build_tag>.+))?-(?P<python_tag>.+)'
    r'-(?P<abi_tag>.+)-(?P<platform_tag>.+)\.whl'
)


def check_dependency(
    req_string: str, ancestral_req_strings: tuple[str, ...] = (), parent_extras: Set[str] = frozenset()
) -> Iterator[tuple[str, ...]]:
    """
    Verify that a dependency and all of its dependencies are met.

    :param req_string: Requirement string
    :param parent_extras: Extras (eg. "test" in myproject[test])
    :yields: Unmet dependencies
    """
    import packaging.requirements

    from ._importlib import metadata

    req = packaging.requirements.Requirement(req_string)
    normalised_req_string = str(req)

    # ``Requirement`` doesn't implement ``__eq__`` so we cannot compare reqs for
    # equality directly but the string representation is stable.
    if normalised_req_string in ancestral_req_strings:
        # cyclical dependency, already checked.
        return

    if req.marker:
        extras = frozenset(('',)).union(parent_extras)
        # a requirement can have multiple extras but ``evaluate`` can
        # only check one at a time.
        if all(not req.marker.evaluate(environment={'extra': e}) for e in extras):
            # if the marker conditions are not met, we pretend that the
            # dependency is satisfied.
            return

    try:
        dist = metadata.distribution(req.name)
    except metadata.PackageNotFoundError:
        # dependency is not installed in the environment.
        yield (*ancestral_req_strings, normalised_req_string)
    else:
        if req.specifier and not req.specifier.contains(dist.version, prereleases=True):
            # the installed version is incompatible.
            yield (*ancestral_req_strings, normalised_req_string)
        elif dist.requires:
            for other_req_string in dist.requires:
                # yields transitive dependencies that are not satisfied.
                yield from check_dependency(other_req_string, (*ancestral_req_strings, normalised_req_string), req.extras)


def parse_wheel_filename(filename: str) -> re.Match[str] | None:
    return _WHEEL_FILENAME_REGEX.match(filename)


if typing.TYPE_CHECKING:
    TarFile = tarfile.TarFile

else:
    # Per https://peps.python.org/pep-0706/, the "data" filter will become
    # the default in Python 3.14. The first series of releases with the filter
    # had a broken filter that could not process symlinks correctly.
    if (
        (3, 8, 18) <= sys.version_info < (3, 9)
        or (3, 9, 18) <= sys.version_info < (3, 10)
        or (3, 10, 13) <= sys.version_info < (3, 11)
        or (3, 11, 5) <= sys.version_info < (3, 12)
        or (3, 12) <= sys.version_info < (3, 14)
    ):

        class TarFile(tarfile.TarFile):
            extraction_filter = staticmethod(tarfile.data_filter)

    else:
        TarFile = tarfile.TarFile
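For orientation, a short usage sketch for check_dependency above, via its public re-export; the requirement string is just an example.

from build import check_dependency

# Each yielded tuple is the chain from the requested requirement down to the unmet dependency.
for chain in check_dependency('build[virtualenv]'):
    print(' -> '.join(chain))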
src/build/env.py
@@ -1,8 +1,8 @@
-"""
-Creates and manages isolated build environments.
-"""
+from __future__ import annotations
+
 import abc
 import functools
+import importlib.util
 import logging
 import os
 import platform
@@ -11,46 +11,37 @@ import subprocess
 import sys
 import sysconfig
 import tempfile
+import typing
 import warnings
 
-from types import TracebackType
-from typing import Callable, Collection, List, Optional, Tuple, Type
+from collections.abc import Collection, Mapping
 
-import build
+from ._exceptions import FailedProcessError
+from ._util import check_dependency
 
 
-try:
-    import virtualenv
-except ModuleNotFoundError:
-    virtualenv = None
+if sys.version_info >= (3, 8):
+    from typing import Protocol
+elif typing.TYPE_CHECKING:
+    from typing_extensions import Protocol
+else:
+    Protocol = abc.ABC
 
 
 _logger = logging.getLogger(__name__)
 
 
-class IsolatedEnv(metaclass=abc.ABCMeta):
-    """Abstract base of isolated build environments, as required by the build project."""
+class IsolatedEnv(Protocol):
+    """Isolated build environment ABC."""
 
     @property
     @abc.abstractmethod
-    def executable(self) -> str:
-        """The executable of the isolated build environment."""
-        raise NotImplementedError
-
-    @property
-    @abc.abstractmethod
-    def scripts_dir(self) -> str:
-        """The scripts directory of the isolated build environment."""
-        raise NotImplementedError
+    def python_executable(self) -> str:
+        """The Python executable of the isolated environment."""
 
     @abc.abstractmethod
-    def install(self, requirements: Collection[str]) -> None:
-        """
-        Install packages from PEP 508 requirements in the isolated build environment.
-
-        :param requirements: PEP 508 requirements
-        """
-        raise NotImplementedError
+    def make_extra_environ(self) -> Mapping[str, str] | None:
+        """Generate additional env vars specific to the isolated environment."""
 
 
 @functools.lru_cache(maxsize=None)
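To make the interface change above concrete, here is a minimal, hypothetical implementation of the new IsolatedEnv protocol; the class name and its behaviour are invented for illustration and are not part of the diff.

import sys

from build.env import IsolatedEnv


class CurrentPythonEnv(IsolatedEnv):
    """Toy example: treat the running interpreter as the 'isolated' environment."""

    @property
    def python_executable(self) -> str:
        return sys.executable

    def make_extra_environ(self):
        # Nothing extra to inject for this toy example.
        return None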
@@ -60,71 +51,99 @@ def _should_use_virtualenv() -> bool:
     # virtualenv might be incompatible if it was installed separately
     # from build. This verifies that virtualenv and all of its
    # dependencies are installed as specified by build.
-    return virtualenv is not None and not any(
+    return importlib.util.find_spec('virtualenv') is not None and not any(
         packaging.requirements.Requirement(d[1]).name == 'virtualenv'
-        for d in build.check_dependency('build[virtualenv]')
+        for d in check_dependency('build[virtualenv]')
         if len(d) > 1
     )
 
 
-def _subprocess(cmd: List[str]) -> None:
+def _subprocess(cmd: list[str]) -> None:
     """Invoke subprocess and output stdout and stderr if it fails."""
     try:
         subprocess.run(cmd, check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
     except subprocess.CalledProcessError as e:
         print(e.output.decode(), end='', file=sys.stderr)
-        raise e
+        raise
 
 
-class IsolatedEnvBuilder:
-    """Builder object for isolated environments."""
+class DefaultIsolatedEnv(IsolatedEnv):
+    """An isolated environment which combines venv and virtualenv with pip."""
 
-    def __init__(self) -> None:
-        self._path: Optional[str] = None
-
-    def __enter__(self) -> IsolatedEnv:
-        """
-        Create an isolated build environment.
-
-        :return: The isolated build environment
-        """
-        # Call ``realpath`` to prevent spurious warning from being emitted
-        # that the venv location has changed on Windows. The username is
-        # DOS-encoded in the output of tempfile - the location is the same
-        # but the representation of it is different, which confuses venv.
-        # Ref: https://bugs.python.org/issue46171
-        self._path = os.path.realpath(tempfile.mkdtemp(prefix='build-env-'))
+    def __enter__(self) -> DefaultIsolatedEnv:
         try:
+            self._path = tempfile.mkdtemp(prefix='build-env-')
             # use virtualenv when available (as it's faster than venv)
             if _should_use_virtualenv():
                 self.log('Creating virtualenv isolated environment...')
-                executable, scripts_dir = _create_isolated_env_virtualenv(self._path)
+                self._python_executable, self._scripts_dir = _create_isolated_env_virtualenv(self._path)
             else:
                 self.log('Creating venv isolated environment...')
-                executable, scripts_dir = _create_isolated_env_venv(self._path)
-            return _IsolatedEnvVenvPip(
-                path=self._path,
-                python_executable=executable,
-                scripts_dir=scripts_dir,
-                log=self.log,
-            )
+                # Call ``realpath`` to prevent spurious warning from being emitted
+                # that the venv location has changed on Windows. The username is
+                # DOS-encoded in the output of tempfile - the location is the same
+                # but the representation of it is different, which confuses venv.
+                # Ref: https://bugs.python.org/issue46171
+                self._path = os.path.realpath(tempfile.mkdtemp(prefix='build-env-'))
+                self._python_executable, self._scripts_dir = _create_isolated_env_venv(self._path)
         except Exception:  # cleanup folder if creation fails
             self.__exit__(*sys.exc_info())
             raise
 
-    def __exit__(
-        self, exc_type: Optional[Type[BaseException]], exc_val: Optional[BaseException], exc_tb: Optional[TracebackType]
-    ) -> None:
-        """
-        Delete the created isolated build environment.
-
-        :param exc_type: The type of exception raised (if any)
-        :param exc_val: The value of exception raised (if any)
-        :param exc_tb: The traceback of exception raised (if any)
-        """
-        if self._path is not None and os.path.exists(self._path):  # in case the user already deleted skip remove
+        return self
+
+    def __exit__(self, *args: object) -> None:
+        if os.path.exists(self._path):  # in case the user already deleted skip remove
             shutil.rmtree(self._path)
 
+    @property
+    def path(self) -> str:
+        """The location of the isolated build environment."""
+        return self._path
+
+    @property
+    def python_executable(self) -> str:
+        """The python executable of the isolated build environment."""
+        return self._python_executable
+
+    def make_extra_environ(self) -> dict[str, str]:
+        path = os.environ.get('PATH')
+        return {'PATH': os.pathsep.join([self._scripts_dir, path]) if path is not None else self._scripts_dir}
+
+    def install(self, requirements: Collection[str]) -> None:
+        """
+        Install packages from PEP 508 requirements in the isolated build environment.
+
+        :param requirements: PEP 508 requirement specification to install
+
+        :note: Passing non-PEP 508 strings will result in undefined behavior, you *should not* rely on it. It is
+               merely an implementation detail, it may change any time without warning.
+        """
+        if not requirements:
+            return
+
+        self.log(f'Installing packages in isolated environment... ({", ".join(sorted(requirements))})')
+
+        # pip does not honour environment markers in command line arguments
+        # but it does for requirements from a file
+        with tempfile.NamedTemporaryFile('w', prefix='build-reqs-', suffix='.txt', delete=False, encoding='utf-8') as req_file:
+            req_file.write(os.linesep.join(requirements))
+        try:
+            cmd = [
+                self.python_executable,
+                '-Im',
+                'pip',
+                'install',
+                '--use-pep517',
+                '--no-warn-script-location',
+                '-r',
+                os.path.abspath(req_file.name),
+            ]
+            _subprocess(cmd)
+        finally:
+            os.unlink(req_file.name)
+
     @staticmethod
     def log(message: str) -> None:
         """
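A usage sketch for the new DefaultIsolatedEnv above, wired to ProjectBuilder the same way this commit's util.py does; the source and output directories are placeholders.

import build
import build.env

with build.env.DefaultIsolatedEnv() as env:
    builder = build.ProjectBuilder.from_isolated_env(env, '.')  # '.' stands for any project source dir
    env.install(builder.build_system_requires)
    env.install(builder.get_requires_for_build('wheel'))
    print(builder.build('wheel', 'dist'))  # 'dist' is a placeholder output dir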
@@ -141,85 +160,15 @@ class IsolatedEnvBuilder:
         _logger.log(logging.INFO, message)
 
 
-class _IsolatedEnvVenvPip(IsolatedEnv):
-    """
-    Isolated build environment context manager
-
-    Non-standard paths injected directly to sys.path will still be passed to the environment.
-    """
-
-    def __init__(
-        self,
-        path: str,
-        python_executable: str,
-        scripts_dir: str,
-        log: Callable[[str], None],
-    ) -> None:
-        """
-        :param path: The path where the environment exists
-        :param python_executable: The python executable within the environment
-        :param log: Log function
-        """
-        self._path = path
-        self._python_executable = python_executable
-        self._scripts_dir = scripts_dir
-        self._log = log
-
-    @property
-    def path(self) -> str:
-        """The location of the isolated build environment."""
-        return self._path
-
-    @property
-    def executable(self) -> str:
-        """The python executable of the isolated build environment."""
-        return self._python_executable
-
-    @property
-    def scripts_dir(self) -> str:
-        return self._scripts_dir
-
-    def install(self, requirements: Collection[str]) -> None:
-        """
-        Install packages from PEP 508 requirements in the isolated build environment.
-
-        :param requirements: PEP 508 requirement specification to install
-
-        :note: Passing non-PEP 508 strings will result in undefined behavior, you *should not* rely on it. It is
-               merely an implementation detail, it may change any time without warning.
-        """
-        if not requirements:
-            return
-
-        self._log('Installing packages in isolated environment... ({})'.format(', '.join(sorted(requirements))))
-
-        # pip does not honour environment markers in command line arguments
-        # but it does for requirements from a file
-        with tempfile.NamedTemporaryFile('w+', prefix='build-reqs-', suffix='.txt', delete=False) as req_file:
-            req_file.write(os.linesep.join(requirements))
-        try:
-            cmd = [
-                self.executable,
-                '-Im',
-                'pip',
-                'install',
-                '--use-pep517',
-                '--no-warn-script-location',
-                '-r',
-                os.path.abspath(req_file.name),
-            ]
-            _subprocess(cmd)
-        finally:
-            os.unlink(req_file.name)
-
-
-def _create_isolated_env_virtualenv(path: str) -> Tuple[str, str]:
+def _create_isolated_env_virtualenv(path: str) -> tuple[str, str]:
     """
     We optionally can use the virtualenv package to provision a virtual environment.
 
     :param path: The path where to create the isolated build environment
     :return: The Python executable and script folder
     """
+    import virtualenv
+
     cmd = [str(path), '--no-setuptools', '--no-wheel', '--activators', '']
     result = virtualenv.cli_run(cmd, setup_logging=False)
     executable = str(result.creator.exe)
@@ -240,12 +189,12 @@ def _fs_supports_symlink() -> bool:
         try:
             os.symlink(tmp_file.name, dest)
             os.unlink(dest)
-            return True
         except (OSError, NotImplementedError, AttributeError):
             return False
+        return True
 
 
-def _create_isolated_env_venv(path: str) -> Tuple[str, str]:
+def _create_isolated_env_venv(path: str) -> tuple[str, str]:
     """
     On Python 3 we use the venv package from the standard library.
 
@@ -268,12 +217,12 @@ def _create_isolated_env_venv(path: str) -> Tuple[str, str]:
         warnings.filterwarnings('ignore', 'check_home argument is deprecated and ignored.', DeprecationWarning)
         venv.EnvBuilder(with_pip=True, symlinks=symlinks).create(path)
     except subprocess.CalledProcessError as exc:
-        raise build.FailedProcessError(exc, 'Failed to create venv. Maybe try installing virtualenv.') from None
+        raise FailedProcessError(exc, 'Failed to create venv. Maybe try installing virtualenv.') from None
 
     executable, script_dir, purelib = _find_executable_and_scripts(path)
 
     # Get the version of pip in the environment
-    pip_distribution = next(iter(metadata.distributions(name='pip', path=[purelib])))  # type: ignore[no-untyped-call]
+    pip_distribution = next(iter(metadata.distributions(name='pip', path=[purelib])))
     current_pip_version = packaging.version.Version(pip_distribution.version)
 
     if platform.system() == 'Darwin' and int(platform.mac_ver()[0].split('.')[0]) >= 11:
@@ -293,7 +242,7 @@ def _create_isolated_env_venv(path: str) -> Tuple[str, str]:
     return executable, script_dir
 
 
-def _find_executable_and_scripts(path: str) -> Tuple[str, str, str]:
+def _find_executable_and_scripts(path: str) -> tuple[str, str, str]:
     """
     Detect the Python executable and script folder of a virtual environment.
 
@@ -329,12 +278,13 @@ def _find_executable_and_scripts(path: str) -> Tuple[str, str, str]:
     paths = sysconfig.get_paths(vars=config_vars)
     executable = os.path.join(paths['scripts'], 'python.exe' if sys.platform.startswith('win') else 'python')
     if not os.path.exists(executable):
-        raise RuntimeError(f'Virtual environment creation failed, executable {executable} missing')
+        msg = f'Virtual environment creation failed, executable {executable} missing'
+        raise RuntimeError(msg)
 
     return executable, paths['scripts'], paths['purelib']
 
 
 __all__ = [
-    'IsolatedEnvBuilder',
     'IsolatedEnv',
+    'DefaultIsolatedEnv',
 ]
@@ -1,57 +1,57 @@
 # SPDX-License-Identifier: MIT
 
-import os
+from __future__ import annotations
+
 import pathlib
-import sys
 import tempfile
 
-import pep517
+import pyproject_hooks
 
-import build
-import build.env
+from . import PathType, ProjectBuilder, RunnerType
+from ._importlib import metadata
+from .env import DefaultIsolatedEnv
 
 
-if sys.version_info >= (3, 8):
-    import importlib.metadata as importlib_metadata
-else:
-    import importlib_metadata
-
-
-def _project_wheel_metadata(builder: build.ProjectBuilder) -> 'importlib_metadata.PackageMetadata':
+def _project_wheel_metadata(builder: ProjectBuilder) -> metadata.PackageMetadata:
     with tempfile.TemporaryDirectory() as tmpdir:
         path = pathlib.Path(builder.metadata_path(tmpdir))
-        # https://github.com/python/importlib_metadata/pull/343
-        return importlib_metadata.PathDistribution(path).metadata  # type: ignore[arg-type]
+        return metadata.PathDistribution(path).metadata
 
 
 def project_wheel_metadata(
-    srcdir: build.PathType,
+    source_dir: PathType,
     isolated: bool = True,
-) -> 'importlib_metadata.PackageMetadata':
+    *,
+    runner: RunnerType = pyproject_hooks.quiet_subprocess_runner,
+) -> metadata.PackageMetadata:
     """
     Return the wheel metadata for a project.
 
     Uses the ``prepare_metadata_for_build_wheel`` hook if available,
     otherwise ``build_wheel``.
 
-    :param srcdir: Project source directory
+    :param source_dir: Project source directory
     :param isolated: Whether or not to run invoke the backend in the current
                      environment or to create an isolated one and invoke it
                      there.
+    :param runner: An alternative runner for backend subprocesses
     """
-    builder = build.ProjectBuilder(
-        os.fspath(srcdir),
-        runner=pep517.quiet_subprocess_runner,
-    )
-
-    if not isolated:
-        return _project_wheel_metadata(builder)
-
-    with build.env.IsolatedEnvBuilder() as env:
-        builder.python_executable = env.executable
-        builder.scripts_dir = env.scripts_dir
-        env.install(builder.build_system_requires)
-        env.install(builder.get_requires_for_build('wheel'))
+    if isolated:
+        with DefaultIsolatedEnv() as env:
+            builder = ProjectBuilder.from_isolated_env(
+                env,
+                source_dir,
+                runner=runner,
+            )
+            env.install(builder.build_system_requires)
+            env.install(builder.get_requires_for_build('wheel'))
+            return _project_wheel_metadata(builder)
+    else:
+        builder = ProjectBuilder(
+            source_dir,
+            runner=runner,
+        )
         return _project_wheel_metadata(builder)
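A short usage sketch for project_wheel_metadata above; the path is a placeholder, and isolated=False assumes the project's build backend is already installed in the current environment.

import build.util

meta = build.util.project_wheel_metadata('.', isolated=False)  # '.' stands for any project source dir
print(meta['Name'], meta['Version'])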
@@ -1,5 +1,6 @@
 # SPDX-License-Identifier: MIT
 
+import contextlib
 import os
 import os.path
 import shutil
@@ -13,6 +14,12 @@ import pytest
 import build.env
 
 
+if sys.version_info < (3, 8):
+    import importlib_metadata as metadata
+else:
+    from importlib import metadata
+
+
 def pytest_addoption(parser):
     os.environ['PYTHONWARNINGS'] = 'ignore:DEPRECATION::pip._internal.cli.base_command'  # for when not run within tox
     os.environ['PIP_DISABLE_PIP_VERSION_CHECK'] = '1'  # do not pollute stderr with upgrade advisory
@@ -31,7 +38,8 @@ def pytest_collection_modifyitems(config, items):
     skip_other = pytest.mark.skip(reason='only integration tests are run (got --only-integration flag)')
 
     if config.getoption('--run-integration') and config.getoption('--only-integration'):  # pragma: no cover
-        raise pytest.UsageError("--run-integration and --only-integration can't be used together, choose one")
+        msg = "--run-integration and --only-integration can't be used together, choose one"
+        raise pytest.UsageError(msg)
 
     if len(items) == 1:  # do not require flags if called directly
         return
@@ -109,3 +117,25 @@ def tmp_dir():
 @pytest.fixture(autouse=True)
 def force_venv(mocker):
     mocker.patch.object(build.env, '_should_use_virtualenv', lambda: False)
+
+
+def pytest_report_header() -> str:
+    interesting_packages = [
+        'build',
+        'colorama',
+        'filelock',
+        'packaging',
+        'pip',
+        'pyproject_hooks',
+        'setuptools',
+        'tomli',
+        'virtualenv',
+        'wheel',
+    ]
+    valid = []
+    for package in interesting_packages:
+        # Old versions of importlib_metadata made this FileNotFoundError
+        with contextlib.suppress(ModuleNotFoundError, FileNotFoundError):
+            valid.append(f'{package}=={metadata.version(package)}')
+    reqs = ' '.join(valid)
+    return f'installed packages of interest: {reqs}'
@@ -1,9 +1,10 @@
-importlib-metadata==0.22
+importlib-metadata==4.6
 packaging==19.0
-pep517==0.9.1
+pyproject_hooks==1.0
 setuptools==42.0.0; python_version < "3.10"
-setuptools==56.0.0; python_version >= "3.10"
-toml==0.10.0
-tomli==1.0.0
+setuptools==56.0.0; python_version == "3.10"
+setuptools==56.0.0; python_version == "3.11"
+setuptools==67.8.0; python_version >= "3.12"
+tomli==1.1.0
 virtualenv==20.0.35
 wheel==0.36.0
@@ -1,6 +1,6 @@
 # SPDX-License-Identifier: MIT
 
-from setuptools.build_meta import build_sdist  # noqa: F401
+from setuptools.build_meta import build_sdist as build_sdist
 
 
 def build_wheel(wheel_directory, config_settings=None, metadata_directory=None):
@@ -16,7 +16,8 @@ def build_sdist(sdist_directory, config_settings=None):
 
 def build_wheel(wheel_directory, config_settings=None, metadata_directory=None):
     if not os.path.isfile('some-file-that-is-needed-for-build.txt'):
-        raise FileNotFoundError('some-file-that-is-needed-for-build.txt is missing!')  # pragma: no cover
+        msg = 'some-file-that-is-needed-for-build.txt is missing!'
+        raise FileNotFoundError(msg)  # pragma: no cover
     file = 'test_cant_build_via_sdist-1.0.0-py2.py3-none-any.whl'
     zipfile.ZipFile(os.path.join(wheel_directory, file), 'w').close()
@@ -1,3 +1,4 @@
 # SPDX-License-Identifier: MIT
 
-from setuptools.build_meta import build_sdist, build_wheel  # noqa: F401
+from setuptools.build_meta import build_sdist as build_sdist
+from setuptools.build_meta import build_wheel as build_wheel
@@ -1,6 +1,5 @@
 # SPDX-License-Identifier: MIT
 import collections
-import inspect
 import logging
 import platform
 import subprocess
@@ -20,15 +19,15 @@ IS_PYPY3 = platform.python_implementation() == 'PyPy'
 @pytest.mark.isolated
 def test_isolation():
     subprocess.check_call([sys.executable, '-c', 'import build.env'])
-    with build.env.IsolatedEnvBuilder() as env:
+    with build.env.DefaultIsolatedEnv() as env:
         with pytest.raises(subprocess.CalledProcessError):
             debug = 'import sys; import os; print(os.linesep.join(sys.path));'
-            subprocess.check_call([env.executable, '-c', f'{debug} import build.env'])
+            subprocess.check_call([env.python_executable, '-c', f'{debug} import build.env'])
 
 
 @pytest.mark.isolated
 def test_isolated_environment_install(mocker):
-    with build.env.IsolatedEnvBuilder() as env:
+    with build.env.DefaultIsolatedEnv() as env:
         mocker.patch('build.env._subprocess')
 
         env.install([])
@@ -38,7 +37,7 @@ def test_isolated_environment_install(mocker):
         build.env._subprocess.assert_called()
         args = build.env._subprocess.call_args[0][0][:-1]
         assert args == [
-            env.executable,
+            env.python_executable,
             '-Im',
             'pip',
             'install',
@@ -52,7 +51,7 @@ def test_isolated_environment_install(mocker):
 @pytest.mark.skipif(sys.platform != 'darwin', reason='workaround for Apple Python')
 def test_can_get_venv_paths_with_conflicting_default_scheme(mocker):
     get_scheme_names = mocker.patch('sysconfig.get_scheme_names', return_value=('osx_framework_library',))
-    with build.env.IsolatedEnvBuilder():
+    with build.env.DefaultIsolatedEnv():
         pass
     assert get_scheme_names.call_count == 1
 
@@ -62,7 +61,7 @@ def test_can_get_venv_paths_with_posix_local_default_scheme(mocker):
     get_paths = mocker.spy(sysconfig, 'get_paths')
     # We should never call this, but we patch it to ensure failure if we do
     get_default_scheme = mocker.patch('sysconfig.get_default_scheme', return_value='posix_local')
-    with build.env.IsolatedEnvBuilder():
+    with build.env.DefaultIsolatedEnv():
         pass
     get_paths.assert_called_once_with(scheme='posix_prefix', vars=mocker.ANY)
     assert get_default_scheme.call_count == 0
@@ -71,7 +70,7 @@ def test_can_get_venv_paths_with_posix_local_default_scheme(mocker):
 def test_executable_missing_post_creation(mocker):
     venv_create = mocker.patch('venv.EnvBuilder.create')
     with pytest.raises(RuntimeError, match='Virtual environment creation failed, executable .* missing'):
-        with build.env.IsolatedEnvBuilder():
+        with build.env.DefaultIsolatedEnv():
             pass
     assert venv_create.call_count == 1
 
@@ -105,8 +104,7 @@ def test_isolated_env_log(mocker, caplog, package_test_flit):
     mocker.patch('build.env._subprocess')
     caplog.set_level(logging.DEBUG)
 
-    builder = build.env.IsolatedEnvBuilder()
-    frameinfo = inspect.getframeinfo(inspect.currentframe())
+    builder = build.env.DefaultIsolatedEnv()
     builder.log('something')  # line number 106
     with builder as env:
         env.install(['something'])
@@ -116,19 +114,15 @@ def test_isolated_env_log(mocker, caplog, package_test_flit):
         ('INFO', 'Creating venv isolated environment...'),
         ('INFO', 'Installing packages in isolated environment... (something)'),
     ]
-    if sys.version_info >= (3, 8):  # stacklevel
-        assert [(record.lineno) for record in caplog.records] == [
-            frameinfo.lineno + 1,
-            frameinfo.lineno - 6,
-            frameinfo.lineno + 85,
-        ]
 
 
 @pytest.mark.isolated
 def test_default_pip_is_never_too_old():
-    with build.env.IsolatedEnvBuilder() as env:
+    with build.env.DefaultIsolatedEnv() as env:
         version = subprocess.check_output(
-            [env.executable, '-c', 'import pip; print(pip.__version__)'], universal_newlines=True
+            [env.python_executable, '-c', 'import pip; print(pip.__version__)'],
+            text=True,
+            encoding='utf-8',
         ).strip()
     assert Version(version) >= Version('19.1')
 
@@ -147,7 +141,7 @@ def test_pip_needs_upgrade_mac_os_11(mocker, pip_version, arch):
     mocker.patch(metadata_name + '.distributions', return_value=(SimpleNamespace(version=pip_version),))
 
     min_version = Version('20.3' if arch == 'x86_64' else '21.0.1')
-    with build.env.IsolatedEnvBuilder():
+    with build.env.DefaultIsolatedEnv():
         if Version(pip_version) < min_version:
             print(_subprocess.call_args_list)
             upgrade_call, uninstall_call = _subprocess.call_args_list
@@ -1,5 +1,6 @@
 # SPDX-License-Identifier: MIT
 
+import importlib.util
 import os
 import os.path
 import platform
@@ -93,8 +94,8 @@ def get_project(name, tmp_path):
 )
 @pytest.mark.isolated
 def test_build(monkeypatch, project, args, call, tmp_path):
-    if project == 'flit' and '--no-isolation' in args:
-        pytest.xfail("can't build flit without isolation due to missing dependencies")
+    if project in {'build', 'flit'} and '--no-isolation' in args:
+        pytest.xfail(f"can't build {project} without isolation due to missing dependencies")
     if project == 'Solaar' and IS_WINDOWS and IS_PYPY3:
         pytest.xfail('Solaar fails building wheels via sdists on Windows on PyPy 3')
 
@@ -110,7 +111,7 @@ def test_build(monkeypatch, project, args, call, tmp_path):
         pytest.skip('Running via PYTHONPATH, so the pyproject-build entrypoint is not available')
     path = get_project(project, tmp_path)
     pkgs = tmp_path / 'pkgs'
-    args = [str(path), '-o', str(pkgs)] + args
+    args = [str(path), '-o', str(pkgs), *args]
 
     if call is None:
         build.__main__.main(args)
@@ -123,11 +124,7 @@ def test_build(monkeypatch, project, args, call, tmp_path):
 
 
 def test_isolation(tmp_dir, package_test_flit, mocker):
-    try:
-        import flit_core  # noqa: F401
-    except ModuleNotFoundError:
-        pass
-    else:
+    if importlib.util.find_spec('flit_core'):
         pytest.xfail('flit_core is available -- we want it missing!')  # pragma: no cover
 
     mocker.patch('build.__main__._error')
@@ -20,6 +20,8 @@ build_open_owner = 'builtins'
 cwd = os.getcwd()
 out = os.path.join(cwd, 'dist')
 
+ANSI_STRIP = re.compile(r'\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])')
+
 
 @pytest.mark.parametrize(
     ('cli_args', 'build_args', 'hook'),
@@ -96,8 +98,9 @@ def test_parse_args(mocker, cli_args, build_args, hook):
         build.__main__.build_package.assert_called_with(*build_args)
     elif hook == 'build_package_via_sdist':
         build.__main__.build_package_via_sdist.assert_called_with(*build_args)
-    else:
-        raise ValueError(f'Unknown hook {hook}')  # pragma: no cover
+    else:  # pragma: no cover
+        msg = f'Unknown hook {hook}'
+        raise ValueError(msg)
 
 
 def test_prog():
@@ -127,13 +130,13 @@ def test_build_isolated(mocker, package_test_flit):
         ],
     )
     mocker.patch('build.__main__._error')
-    install = mocker.patch('build.env._IsolatedEnvVenvPip.install')
+    install = mocker.patch('build.env.DefaultIsolatedEnv.install')
 
     build.__main__.build_package(package_test_flit, '.', ['sdist'])
 
     install.assert_any_call({'flit_core >=2,<3'})
 
-    required_cmd.assert_called_with('sdist')
+    required_cmd.assert_called_with('sdist', {})
     install.assert_any_call(['dep1', 'dep2'])
 
     build_cmd.assert_called_with('sdist', '.', {})
@@ -170,7 +173,7 @@ def test_build_no_isolation_with_check_deps(mocker, package_test_flit, missing_d
 @pytest.mark.isolated
 def test_build_raises_build_exception(mocker, package_test_flit):
     mocker.patch('build.ProjectBuilder.get_requires_for_build', side_effect=build.BuildException)
-    mocker.patch('build.env._IsolatedEnvVenvPip.install')
+    mocker.patch('build.env.DefaultIsolatedEnv.install')
 
     with pytest.raises(build.BuildException):
         build.__main__.build_package(package_test_flit, '.', ['sdist'])
@@ -179,13 +182,14 @@ def test_build_raises_build_exception(mocker, package_test_flit):
 @pytest.mark.isolated
 def test_build_raises_build_backend_exception(mocker, package_test_flit):
     mocker.patch('build.ProjectBuilder.get_requires_for_build', side_effect=build.BuildBackendException(Exception('a')))
-    mocker.patch('build.env._IsolatedEnvVenvPip.install')
+    mocker.patch('build.env.DefaultIsolatedEnv.install')
 
     msg = f"Backend operation failed: Exception('a'{',' if sys.version_info < (3, 7) else ''})"
     with pytest.raises(build.BuildBackendException, match=re.escape(msg)):
         build.__main__.build_package(package_test_flit, '.', ['sdist'])
 
 
+@pytest.mark.network
 @pytest.mark.pypy3323bug
 def test_build_package(tmp_dir, package_test_setuptools):
     build.__main__.build_package(package_test_setuptools, tmp_dir, ['sdist', 'wheel'])
@@ -196,6 +200,7 @@ def test_build_package(tmp_dir, package_test_setuptools):
     ]
 
 
+@pytest.mark.network
 @pytest.mark.pypy3323bug
 def test_build_package_via_sdist(tmp_dir, package_test_setuptools):
     build.__main__.build_package_via_sdist(package_test_setuptools, tmp_dir, ['wheel'])
@@ -221,7 +226,7 @@ def test_build_package_via_sdist_invalid_distribution(tmp_dir, package_test_setu
 @pytest.mark.parametrize(
     ('args', 'output'),
     [
-        (
+        pytest.param(
             [],
             [
                 '* Creating venv isolated environment...',
@@ -236,8 +241,10 @@ def test_build_package_via_sdist_invalid_distribution(tmp_dir, package_test_setu
                 '* Building wheel...',
                 'Successfully built test_setuptools-1.0.0.tar.gz and test_setuptools-1.0.0-py2.py3-none-any.whl',
             ],
+            id='via-sdist-isolation',
+            marks=[pytest.mark.network, pytest.mark.isolated],
         ),
-        (
+        pytest.param(
             ['--no-isolation'],
             [
                 '* Getting build dependencies for sdist...',
@@ -247,8 +254,9 @@ def test_build_package_via_sdist_invalid_distribution(tmp_dir, package_test_setu
                 '* Building wheel...',
                 'Successfully built test_setuptools-1.0.0.tar.gz and test_setuptools-1.0.0-py2.py3-none-any.whl',
             ],
+            id='via-sdist-no-isolation',
         ),
-        (
+        pytest.param(
            ['--wheel'],
             [
                 '* Creating venv isolated environment...',
@@ -258,24 +266,28 @@ def test_build_package_via_sdist_invalid_distribution(tmp_dir, package_test_setu
                 '* Building wheel...',
                 'Successfully built test_setuptools-1.0.0-py2.py3-none-any.whl',
             ],
+            id='wheel-direct-isolation',
+            marks=[pytest.mark.network, pytest.mark.isolated],
         ),
-        (
+        pytest.param(
             ['--wheel', '--no-isolation'],
             [
                 '* Getting build dependencies for wheel...',
                 '* Building wheel...',
                 'Successfully built test_setuptools-1.0.0-py2.py3-none-any.whl',
             ],
+            id='wheel-direct-no-isolation',
        ),
-        (
+        pytest.param(
             ['--sdist', '--no-isolation'],
             [
                 '* Getting build dependencies for sdist...',
                 '* Building sdist...',
                 'Successfully built test_setuptools-1.0.0.tar.gz',
             ],
+            id='sdist-direct-no-isolation',
         ),
-        (
+        pytest.param(
             ['--sdist', '--wheel', '--no-isolation'],
             [
                 '* Getting build dependencies for sdist...',
@@ -284,20 +296,13 @@ def test_build_package_via_sdist_invalid_distribution(tmp_dir, package_test_setu
                 '* Building wheel...',
                 'Successfully built test_setuptools-1.0.0.tar.gz and test_setuptools-1.0.0-py2.py3-none-any.whl',
             ],
+            id='sdist-and-wheel-direct-no-isolation',
         ),
     ],
-    ids=[
-        'via-sdist-isolation',
-        'via-sdist-no-isolation',
-        'wheel-direct-isolation',
-        'wheel-direct-no-isolation',
-        'sdist-direct-no-isolation',
-        'sdist-and-wheel-direct-no-isolation',
-    ],
 )
 @pytest.mark.flaky(reruns=5)
 def test_output(package_test_setuptools, tmp_dir, capsys, args, output):
-    build.__main__.main([package_test_setuptools, '-o', tmp_dir] + args)
+    build.__main__.main([package_test_setuptools, '-o', tmp_dir, *args])
     stdout, stderr = capsys.readouterr()
     assert stdout.splitlines() == output
 
@@ -368,8 +373,10 @@ def test_output_env_subprocess_error(
     assert stdout[:4] == stdout_body
     assert stdout[-1].startswith(stdout_error)
 
-    assert len(stderr) == 1
-    assert stderr[0].startswith('ERROR: Invalid requirement: ')
+    # Newer versions of pip also color stderr - strip them if present
+    cleaned_stderr = ANSI_STRIP.sub('', '\n'.join(stderr)).strip()
+    assert len(cleaned_stderr.splitlines()) == 1
+    assert cleaned_stderr.startswith('ERROR: Invalid requirement: ')
 
 
 @pytest.mark.parametrize(
@@ -0,0 +1,11 @@
import pytest

from build.__main__ import _natural_language_list


def test_natural_language_list():
    assert _natural_language_list(['one']) == 'one'
    assert _natural_language_list(['one', 'two']) == 'one and two'
    assert _natural_language_list(['one', 'two', 'three']) == 'one, two and three'
    with pytest.raises(IndexError, match='no elements'):
        _natural_language_list([])
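The helper under test is internal to build and its body is not shown in this diff; for readers who want the behaviour spelled out, here is a hypothetical implementation that satisfies the assertions above, not the project's actual code.

def _natural_language_list(elements):
    if not elements:
        msg = 'no elements'
        raise IndexError(msg)
    if len(elements) == 1:
        return elements[0]
    # 'one, two and three' style: commas between all but the last element
    return '{} and {}'.format(', '.join(elements[:-1]), elements[-1])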
@@ -2,24 +2,18 @@
 
 
 import copy
-import importlib
 import logging
 import os
+import pathlib
 import sys
 import textwrap
 
-import pep517.wrappers
+import pyproject_hooks
 import pytest
 
 import build
 
-if sys.version_info >= (3, 8):  # pragma: no cover
-    from importlib import metadata as importlib_metadata
-else:  # pragma: no cover
-    import importlib_metadata
-
-
-import pathlib
+from build import _importlib
 
 
 build_open_owner = 'builtins'
@@ -31,7 +25,7 @@ DEFAULT_BACKEND = {
 }
 
 
-class MockDistribution(importlib_metadata.Distribution):
+class MockDistribution(_importlib.metadata.Distribution):
     def locate_file(self, path):  # pragma: no cover
         return ''
 
@@ -49,7 +43,7 @@ class MockDistribution(importlib_metadata.Distribution):
             return CircularMockDistribution()
         elif name == 'nested_circular_dep':
             return NestedCircularMockDistribution()
-        raise importlib_metadata.PackageNotFoundError
+        raise _importlib.metadata.PackageNotFoundError
 
 
 class ExtraMockDistribution(MockDistribution):
@@ -60,13 +54,13 @@ class ExtraMockDistribution(MockDistribution):
                 Metadata-Version: 2.2
                 Name: extras_dep
                 Version: 1.0.0
-                Provides-Extra: extra_without_associated_deps
-                Provides-Extra: extra_with_unmet_deps
-                Requires-Dist: unmet_dep; extra == 'extra_with_unmet_deps'
-                Provides-Extra: extra_with_met_deps
-                Requires-Dist: extras_dep; extra == 'extra_with_met_deps'
-                Provides-Extra: recursive_extra_with_unmet_deps
-                Requires-Dist: recursive_dep; extra == 'recursive_extra_with_unmet_deps'
+                Provides-Extra: extra-without-associated-deps
+                Provides-Extra: extra-with-unmet-deps
+                Requires-Dist: unmet_dep; extra == 'extra-with-unmet-deps'
+                Provides-Extra: extra-with-met-deps
+                Requires-Dist: extras_dep; extra == 'extra-with-met-deps'
+                Provides-Extra: recursive-extra-with-unmet-deps
+                Requires-Dist: recursive_dep; extra == 'recursive-extra-with-unmet-deps'
                 """
             ).strip()
 
@@ -142,33 +136,33 @@ class NestedCircularMockDistribution(MockDistribution):
         ('requireless_dep', None),
         ('extras_dep[undefined_extra]', None),
         # would the wheel builder filter this out?
-        ('extras_dep[extra_without_associated_deps]', None),
+        ('extras_dep[extra-without-associated-deps]', None),
         (
-            'extras_dep[extra_with_unmet_deps]',
-            ('extras_dep[extra_with_unmet_deps]', 'unmet_dep; extra == "extra_with_unmet_deps"'),
+            'extras_dep[extra-with-unmet-deps]',
+            ('extras_dep[extra-with-unmet-deps]', 'unmet_dep; extra == "extra-with-unmet-deps"'),
         ),
         (
-            'extras_dep[recursive_extra_with_unmet_deps]',
+            'extras_dep[recursive-extra-with-unmet-deps]',
             (
-                'extras_dep[recursive_extra_with_unmet_deps]',
-                'recursive_dep; extra == "recursive_extra_with_unmet_deps"',
+                'extras_dep[recursive-extra-with-unmet-deps]',
+                'recursive_dep; extra == "recursive-extra-with-unmet-deps"',
                 'recursive_unmet_dep',
             ),
         ),
-        ('extras_dep[extra_with_met_deps]', None),
+        ('extras_dep[extra-with-met-deps]', None),
         ('missing_dep; python_version>"10"', None),
         ('missing_dep; python_version<="1"', None),
         ('missing_dep; python_version>="1"', ('missing_dep; python_version >= "1"',)),
         ('extras_dep == 1.0.0', None),
         ('extras_dep == 2.0.0', ('extras_dep==2.0.0',)),
-        ('extras_dep[extra_without_associated_deps] == 1.0.0', None),
-        ('extras_dep[extra_without_associated_deps] == 2.0.0', ('extras_dep[extra_without_associated_deps]==2.0.0',)),
+        ('extras_dep[extra-without-associated-deps] == 1.0.0', None),
+        ('extras_dep[extra-without-associated-deps] == 2.0.0', ('extras_dep[extra-without-associated-deps]==2.0.0',)),
         ('prerelease_dep >= 1.0.0', None),
         ('circular_dep', None),
     ],
 )
 def test_check_dependency(monkeypatch, requirement_string, expected):
-    monkeypatch.setattr(importlib_metadata, 'Distribution', MockDistribution)
+    monkeypatch.setattr(_importlib.metadata, 'Distribution', MockDistribution)
     assert next(build.check_dependency(requirement_string), None) == expected
 
@@ -185,25 +179,25 @@ def test_bad_project(package_test_no_project):
 
 
 def test_init(mocker, package_test_flit, package_legacy, test_no_permission, package_test_bad_syntax):
-    mocker.patch('pep517.wrappers.Pep517HookCaller')
+    mocker.patch('pyproject_hooks.BuildBackendHookCaller')
 
     # correct flit pyproject.toml
     builder = build.ProjectBuilder(package_test_flit)
-    pep517.wrappers.Pep517HookCaller.assert_called_with(
+    pyproject_hooks.BuildBackendHookCaller.assert_called_with(
         package_test_flit, 'flit_core.buildapi', backend_path=None, python_executable=sys.executable, runner=builder._runner
     )
-    pep517.wrappers.Pep517HookCaller.reset_mock()
+    pyproject_hooks.BuildBackendHookCaller.reset_mock()
 
     # custom python
     builder = build.ProjectBuilder(package_test_flit, python_executable='some-python')
-    pep517.wrappers.Pep517HookCaller.assert_called_with(
+    pyproject_hooks.BuildBackendHookCaller.assert_called_with(
         package_test_flit, 'flit_core.buildapi', backend_path=None, python_executable='some-python', runner=builder._runner
     )
-    pep517.wrappers.Pep517HookCaller.reset_mock()
+    pyproject_hooks.BuildBackendHookCaller.reset_mock()
 
     # FileNotFoundError
     builder = build.ProjectBuilder(package_legacy)
-    pep517.wrappers.Pep517HookCaller.assert_called_with(
+    pyproject_hooks.BuildBackendHookCaller.assert_called_with(
         package_legacy,
         'setuptools.build_meta:__legacy__',
         backend_path=None,
@@ -221,13 +215,11 @@ def test_init(mocker, package_test_flit, package_legacy, test_no_permission, pac
         build.ProjectBuilder(package_test_bad_syntax)
 
 
-@pytest.mark.parametrize('value', [b'something', 'something_else'])
-def test_python_executable(package_test_flit, value):
-    builder = build.ProjectBuilder(package_test_flit)
-
-    builder.python_executable = value
-    assert builder.python_executable == value
-    assert builder._hook.python_executable == value
+def test_init_makes_source_dir_absolute(package_test_flit):
+    rel_dir = os.path.relpath(package_test_flit, os.getcwd())
+    assert not os.path.isabs(rel_dir)
+    builder = build.ProjectBuilder(rel_dir)
+    assert os.path.isabs(builder.source_dir)
 
 
 @pytest.mark.parametrize('distribution', ['wheel', 'sdist'])
@@ -256,15 +248,15 @@ def test_build_missing_backend(packages_path, distribution, tmpdir):
 
 
 def test_check_dependencies(mocker, package_test_flit):
-    mocker.patch('pep517.wrappers.Pep517HookCaller.get_requires_for_build_sdist')
-    mocker.patch('pep517.wrappers.Pep517HookCaller.get_requires_for_build_wheel')
+    mocker.patch('pyproject_hooks.BuildBackendHookCaller.get_requires_for_build_sdist')
+    mocker.patch('pyproject_hooks.BuildBackendHookCaller.get_requires_for_build_wheel')
 
     builder = build.ProjectBuilder(package_test_flit)
 
     side_effects = [
         [],
         ['something'],
-        pep517.wrappers.BackendUnavailable,
+        pyproject_hooks.BackendUnavailable,
     ]
 
     builder._hook.get_requires_for_build_sdist.side_effect = copy.copy(side_effects)
@@ -285,23 +277,8 @@ def test_check_dependencies(mocker, package_test_flit):
     not builder.check_dependencies('wheel')
 
 
-def test_working_directory(tmp_dir):
-    assert os.path.realpath(os.curdir) != os.path.realpath(tmp_dir)
-    with build._working_directory(tmp_dir):
-        assert os.path.realpath(os.curdir) == os.path.realpath(tmp_dir)
-
-
-def test_working_directory_exc_is_not_transformed(mocker, package_test_flit, tmp_dir):
-    mocker.patch('build._working_directory', side_effect=OSError)
-
-    builder = build.ProjectBuilder(package_test_flit)
-    with pytest.raises(OSError):
-        builder._call_backend('build_sdist', tmp_dir)
-
-
 def test_build(mocker, package_test_flit, tmp_dir):
-    mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
-    mocker.patch('build._working_directory', autospec=True)
+    mocker.patch('pyproject_hooks.BuildBackendHookCaller', autospec=True)
 
     builder = build.ProjectBuilder(package_test_flit)
 
@@ -310,23 +287,19 @@ def test_build(mocker, package_test_flit, tmp_dir):
 
     assert builder.build('sdist', tmp_dir) == os.path.join(tmp_dir, 'dist.tar.gz')
     builder._hook.build_sdist.assert_called_with(tmp_dir, None)
|
builder._hook.build_sdist.assert_called_with(tmp_dir, None)
|
||||||
build._working_directory.assert_called_with(package_test_flit)
|
|
||||||
|
|
||||||
assert builder.build('wheel', tmp_dir) == os.path.join(tmp_dir, 'dist.whl')
|
assert builder.build('wheel', tmp_dir) == os.path.join(tmp_dir, 'dist.whl')
|
||||||
builder._hook.build_wheel.assert_called_with(tmp_dir, None)
|
builder._hook.build_wheel.assert_called_with(tmp_dir, None)
|
||||||
build._working_directory.assert_called_with(package_test_flit)
|
|
||||||
|
|
||||||
with pytest.raises(build.BuildBackendException):
|
with pytest.raises(build.BuildBackendException):
|
||||||
build._working_directory.assert_called_with(package_test_flit)
|
|
||||||
builder.build('sdist', tmp_dir)
|
builder.build('sdist', tmp_dir)
|
||||||
|
|
||||||
with pytest.raises(build.BuildBackendException):
|
with pytest.raises(build.BuildBackendException):
|
||||||
build._working_directory.assert_called_with(package_test_flit)
|
|
||||||
builder.build('wheel', tmp_dir)
|
builder.build('wheel', tmp_dir)
|
||||||
|
|
||||||
|
|
||||||
def test_default_backend(mocker, package_legacy):
|
def test_default_backend(mocker, package_legacy):
|
||||||
mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
|
mocker.patch('pyproject_hooks.BuildBackendHookCaller', autospec=True)
|
||||||
|
|
||||||
builder = build.ProjectBuilder(package_legacy)
|
builder = build.ProjectBuilder(package_legacy)
|
||||||
|
|
||||||
|
@ -334,7 +307,7 @@ def test_default_backend(mocker, package_legacy):
|
||||||
|
|
||||||
|
|
||||||
def test_missing_backend(mocker, package_test_no_backend):
|
def test_missing_backend(mocker, package_test_no_backend):
|
||||||
mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
|
mocker.patch('pyproject_hooks.BuildBackendHookCaller', autospec=True)
|
||||||
|
|
||||||
builder = build.ProjectBuilder(package_test_no_backend)
|
builder = build.ProjectBuilder(package_test_no_backend)
|
||||||
|
|
||||||
|
@ -342,21 +315,21 @@ def test_missing_backend(mocker, package_test_no_backend):
|
||||||
|
|
||||||
|
|
||||||
def test_missing_requires(mocker, package_test_no_requires):
|
def test_missing_requires(mocker, package_test_no_requires):
|
||||||
mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
|
mocker.patch('pyproject_hooks.BuildBackendHookCaller', autospec=True)
|
||||||
|
|
||||||
with pytest.raises(build.BuildException):
|
with pytest.raises(build.BuildException):
|
||||||
build.ProjectBuilder(package_test_no_requires)
|
build.ProjectBuilder(package_test_no_requires)
|
||||||
|
|
||||||
|
|
||||||
def test_build_system_typo(mocker, package_test_typo):
|
def test_build_system_typo(mocker, package_test_typo):
|
||||||
mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
|
mocker.patch('pyproject_hooks.BuildBackendHookCaller', autospec=True)
|
||||||
|
|
||||||
with pytest.warns(build.TypoWarning):
|
with pytest.warns(build.TypoWarning):
|
||||||
build.ProjectBuilder(package_test_typo)
|
build.ProjectBuilder(package_test_typo)
|
||||||
|
|
||||||
|
|
||||||
def test_missing_outdir(mocker, tmp_dir, package_test_flit):
|
def test_missing_outdir(mocker, tmp_dir, package_test_flit):
|
||||||
mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
|
mocker.patch('pyproject_hooks.BuildBackendHookCaller', autospec=True)
|
||||||
|
|
||||||
builder = build.ProjectBuilder(package_test_flit)
|
builder = build.ProjectBuilder(package_test_flit)
|
||||||
builder._hook.build_sdist.return_value = 'dist.tar.gz'
|
builder._hook.build_sdist.return_value = 'dist.tar.gz'
|
||||||
|
@ -368,7 +341,7 @@ def test_missing_outdir(mocker, tmp_dir, package_test_flit):
|
||||||
|
|
||||||
|
|
||||||
def test_relative_outdir(mocker, tmp_dir, package_test_flit):
|
def test_relative_outdir(mocker, tmp_dir, package_test_flit):
|
||||||
mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
|
mocker.patch('pyproject_hooks.BuildBackendHookCaller', autospec=True)
|
||||||
|
|
||||||
builder = build.ProjectBuilder(package_test_flit)
|
builder = build.ProjectBuilder(package_test_flit)
|
||||||
builder._hook.build_sdist.return_value = 'dist.tar.gz'
|
builder._hook.build_sdist.return_value = 'dist.tar.gz'
|
||||||
|
@ -379,13 +352,13 @@ def test_relative_outdir(mocker, tmp_dir, package_test_flit):
|
||||||
|
|
||||||
|
|
||||||
def test_build_not_dir_outdir(mocker, tmp_dir, package_test_flit):
|
def test_build_not_dir_outdir(mocker, tmp_dir, package_test_flit):
|
||||||
mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
|
mocker.patch('pyproject_hooks.BuildBackendHookCaller', autospec=True)
|
||||||
|
|
||||||
builder = build.ProjectBuilder(package_test_flit)
|
builder = build.ProjectBuilder(package_test_flit)
|
||||||
builder._hook.build_sdist.return_value = 'dist.tar.gz'
|
builder._hook.build_sdist.return_value = 'dist.tar.gz'
|
||||||
out = os.path.join(tmp_dir, 'out')
|
out = os.path.join(tmp_dir, 'out')
|
||||||
|
|
||||||
open(out, 'a').close() # create empty file
|
open(out, 'a', encoding='utf-8').close() # create empty file
|
||||||
|
|
||||||
with pytest.raises(build.BuildException):
|
with pytest.raises(build.BuildException):
|
||||||
builder.build('sdist', out)
|
builder.build('sdist', out)
|
||||||
|
@ -395,7 +368,7 @@ def test_build_not_dir_outdir(mocker, tmp_dir, package_test_flit):
|
||||||
def demo_pkg_inline(tmp_path_factory):
|
def demo_pkg_inline(tmp_path_factory):
|
||||||
# builds a wheel without any dependencies and with a console script demo-pkg-inline
|
# builds a wheel without any dependencies and with a console script demo-pkg-inline
|
||||||
tmp_path = tmp_path_factory.mktemp('demo-pkg-inline')
|
tmp_path = tmp_path_factory.mktemp('demo-pkg-inline')
|
||||||
builder = build.ProjectBuilder(srcdir=os.path.join(os.path.dirname(__file__), 'packages', 'inline'))
|
builder = build.ProjectBuilder(source_dir=os.path.join(os.path.dirname(__file__), 'packages', 'inline'))
|
||||||
out = tmp_path / 'dist'
|
out = tmp_path / 'dist'
|
||||||
builder.build('wheel', str(out))
|
builder.build('wheel', str(out))
|
||||||
return next(out.iterdir())
|
return next(out.iterdir())
|
||||||
|
@ -432,7 +405,7 @@ def test_build_with_dep_on_console_script(tmp_path, demo_pkg_inline, capfd, mock
|
||||||
'''
|
'''
|
||||||
)
|
)
|
||||||
(tmp_path / 'pyproject.toml').write_text(toml, encoding='UTF-8')
|
(tmp_path / 'pyproject.toml').write_text(toml, encoding='UTF-8')
|
||||||
(tmp_path / 'build.py').write_text(code)
|
(tmp_path / 'build.py').write_text(code, encoding='utf-8')
|
||||||
|
|
||||||
deps = {str(demo_pkg_inline)} # we patch the requires demo_pkg_inline to refer to the wheel -> we don't need index
|
deps = {str(demo_pkg_inline)} # we patch the requires demo_pkg_inline to refer to the wheel -> we don't need index
|
||||||
mocker.patch('build.ProjectBuilder.build_system_requires', new_callable=mocker.PropertyMock, return_value=deps)
|
mocker.patch('build.ProjectBuilder.build_system_requires', new_callable=mocker.PropertyMock, return_value=deps)
|
||||||
|
@ -449,29 +422,27 @@ def test_build_with_dep_on_console_script(tmp_path, demo_pkg_inline, capfd, mock
|
||||||
|
|
||||||
|
|
||||||
def test_prepare(mocker, tmp_dir, package_test_flit):
|
def test_prepare(mocker, tmp_dir, package_test_flit):
|
||||||
mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
|
mocker.patch('pyproject_hooks.BuildBackendHookCaller', autospec=True)
|
||||||
mocker.patch('build._working_directory', autospec=True)
|
|
||||||
|
|
||||||
builder = build.ProjectBuilder(package_test_flit)
|
builder = build.ProjectBuilder(package_test_flit)
|
||||||
builder._hook.prepare_metadata_for_build_wheel.return_value = 'dist-1.0.dist-info'
|
builder._hook.prepare_metadata_for_build_wheel.return_value = 'dist-1.0.dist-info'
|
||||||
|
|
||||||
assert builder.prepare('wheel', tmp_dir) == os.path.join(tmp_dir, 'dist-1.0.dist-info')
|
assert builder.prepare('wheel', tmp_dir) == os.path.join(tmp_dir, 'dist-1.0.dist-info')
|
||||||
builder._hook.prepare_metadata_for_build_wheel.assert_called_with(tmp_dir, None, _allow_fallback=False)
|
builder._hook.prepare_metadata_for_build_wheel.assert_called_with(tmp_dir, None, _allow_fallback=False)
|
||||||
build._working_directory.assert_called_with(package_test_flit)
|
|
||||||
|
|
||||||
|
|
||||||
def test_prepare_no_hook(mocker, tmp_dir, package_test_flit):
|
def test_prepare_no_hook(mocker, tmp_dir, package_test_flit):
|
||||||
mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
|
mocker.patch('pyproject_hooks.BuildBackendHookCaller', autospec=True)
|
||||||
|
|
||||||
builder = build.ProjectBuilder(package_test_flit)
|
builder = build.ProjectBuilder(package_test_flit)
|
||||||
failure = pep517.wrappers.HookMissing('prepare_metadata_for_build_wheel')
|
failure = pyproject_hooks.HookMissing('prepare_metadata_for_build_wheel')
|
||||||
builder._hook.prepare_metadata_for_build_wheel.side_effect = failure
|
builder._hook.prepare_metadata_for_build_wheel.side_effect = failure
|
||||||
|
|
||||||
assert builder.prepare('wheel', tmp_dir) is None
|
assert builder.prepare('wheel', tmp_dir) is None
|
||||||
|
|
||||||
|
|
||||||
def test_prepare_error(mocker, tmp_dir, package_test_flit):
|
def test_prepare_error(mocker, tmp_dir, package_test_flit):
|
||||||
mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
|
mocker.patch('pyproject_hooks.BuildBackendHookCaller', autospec=True)
|
||||||
|
|
||||||
builder = build.ProjectBuilder(package_test_flit)
|
builder = build.ProjectBuilder(package_test_flit)
|
||||||
builder._hook.prepare_metadata_for_build_wheel.side_effect = Exception
|
builder._hook.prepare_metadata_for_build_wheel.side_effect = Exception
|
||||||
|
@ -481,19 +452,19 @@ def test_prepare_error(mocker, tmp_dir, package_test_flit):
|
||||||
|
|
||||||
|
|
||||||
def test_prepare_not_dir_outdir(mocker, tmp_dir, package_test_flit):
|
def test_prepare_not_dir_outdir(mocker, tmp_dir, package_test_flit):
|
||||||
mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
|
mocker.patch('pyproject_hooks.BuildBackendHookCaller', autospec=True)
|
||||||
|
|
||||||
builder = build.ProjectBuilder(package_test_flit)
|
builder = build.ProjectBuilder(package_test_flit)
|
||||||
|
|
||||||
out = os.path.join(tmp_dir, 'out')
|
out = os.path.join(tmp_dir, 'out')
|
||||||
with open(out, 'w') as f:
|
with open(out, 'w', encoding='utf-8') as f:
|
||||||
f.write('Not a directory')
|
f.write('Not a directory')
|
||||||
with pytest.raises(build.BuildException, match='Build path .* exists and is not a directory'):
|
with pytest.raises(build.BuildException, match='Build path .* exists and is not a directory'):
|
||||||
builder.prepare('wheel', out)
|
builder.prepare('wheel', out)
|
||||||
|
|
||||||
|
|
||||||
def test_no_outdir_single(mocker, tmp_dir, package_test_flit):
|
def test_no_outdir_single(mocker, tmp_dir, package_test_flit):
|
||||||
mocker.patch('pep517.wrappers.Pep517HookCaller.prepare_metadata_for_build_wheel', return_value='')
|
mocker.patch('pyproject_hooks.BuildBackendHookCaller.prepare_metadata_for_build_wheel', return_value='')
|
||||||
|
|
||||||
builder = build.ProjectBuilder(package_test_flit)
|
builder = build.ProjectBuilder(package_test_flit)
|
||||||
|
|
||||||
|
@ -504,7 +475,7 @@ def test_no_outdir_single(mocker, tmp_dir, package_test_flit):
|
||||||
|
|
||||||
|
|
||||||
def test_no_outdir_multiple(mocker, tmp_dir, package_test_flit):
|
def test_no_outdir_multiple(mocker, tmp_dir, package_test_flit):
|
||||||
mocker.patch('pep517.wrappers.Pep517HookCaller.prepare_metadata_for_build_wheel', return_value='')
|
mocker.patch('pyproject_hooks.BuildBackendHookCaller.prepare_metadata_for_build_wheel', return_value='')
|
||||||
|
|
||||||
builder = build.ProjectBuilder(package_test_flit)
|
builder = build.ProjectBuilder(package_test_flit)
|
||||||
|
|
||||||
|
@ -515,8 +486,9 @@ def test_no_outdir_multiple(mocker, tmp_dir, package_test_flit):
|
||||||
|
|
||||||
|
|
||||||
def test_runner_user_specified(tmp_dir, package_test_flit):
|
def test_runner_user_specified(tmp_dir, package_test_flit):
|
||||||
def dummy_runner(cmd, cwd=None, env=None):
|
def dummy_runner(cmd, cwd=None, extra_environ=None):
|
||||||
raise RuntimeError('Runner was called')
|
msg = 'Runner was called'
|
||||||
|
raise RuntimeError(msg)
|
||||||
|
|
||||||
builder = build.ProjectBuilder(package_test_flit, runner=dummy_runner)
|
builder = build.ProjectBuilder(package_test_flit, runner=dummy_runner)
|
||||||
with pytest.raises(build.BuildBackendException, match='Runner was called'):
|
with pytest.raises(build.BuildBackendException, match='Runner was called'):
|
||||||
|
@ -526,7 +498,7 @@ def test_runner_user_specified(tmp_dir, package_test_flit):
|
||||||
def test_metadata_path_no_prepare(tmp_dir, package_test_no_prepare):
|
def test_metadata_path_no_prepare(tmp_dir, package_test_no_prepare):
|
||||||
builder = build.ProjectBuilder(package_test_no_prepare)
|
builder = build.ProjectBuilder(package_test_no_prepare)
|
||||||
|
|
||||||
metadata = importlib_metadata.PathDistribution(
|
metadata = _importlib.metadata.PathDistribution(
|
||||||
pathlib.Path(builder.metadata_path(tmp_dir)),
|
pathlib.Path(builder.metadata_path(tmp_dir)),
|
||||||
).metadata
|
).metadata
|
||||||
|
|
||||||
|
@ -537,7 +509,7 @@ def test_metadata_path_no_prepare(tmp_dir, package_test_no_prepare):
|
||||||
def test_metadata_path_with_prepare(tmp_dir, package_test_setuptools):
|
def test_metadata_path_with_prepare(tmp_dir, package_test_setuptools):
|
||||||
builder = build.ProjectBuilder(package_test_setuptools)
|
builder = build.ProjectBuilder(package_test_setuptools)
|
||||||
|
|
||||||
metadata = importlib_metadata.PathDistribution(
|
metadata = _importlib.metadata.PathDistribution(
|
||||||
pathlib.Path(builder.metadata_path(tmp_dir)),
|
pathlib.Path(builder.metadata_path(tmp_dir)),
|
||||||
).metadata
|
).metadata
|
||||||
|
|
||||||
|
@ -548,7 +520,7 @@ def test_metadata_path_with_prepare(tmp_dir, package_test_setuptools):
|
||||||
def test_metadata_path_legacy(tmp_dir, package_legacy):
|
def test_metadata_path_legacy(tmp_dir, package_legacy):
|
||||||
builder = build.ProjectBuilder(package_legacy)
|
builder = build.ProjectBuilder(package_legacy)
|
||||||
|
|
||||||
metadata = importlib_metadata.PathDistribution(
|
metadata = _importlib.metadata.PathDistribution(
|
||||||
pathlib.Path(builder.metadata_path(tmp_dir)),
|
pathlib.Path(builder.metadata_path(tmp_dir)),
|
||||||
).metadata
|
).metadata
|
||||||
|
|
||||||
|
@ -563,33 +535,8 @@ def test_metadata_invalid_wheel(tmp_dir, package_test_bad_wheel):
|
||||||
builder.metadata_path(tmp_dir)
|
builder.metadata_path(tmp_dir)
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def mock_tomli_not_available(mocker):
|
|
||||||
loads = mocker.patch('tomli.loads')
|
|
||||||
mocker.patch.dict(sys.modules, {'tomli': None})
|
|
||||||
importlib.reload(build)
|
|
||||||
try:
|
|
||||||
yield
|
|
||||||
finally:
|
|
||||||
loads.assert_not_called()
|
|
||||||
mocker.stopall()
|
|
||||||
importlib.reload(build)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.skipif(sys.version_info >= (3, 11), reason='No need to test old toml support on 3.11+')
|
|
||||||
def test_toml_instead_of_tomli(mocker, mock_tomli_not_available, tmp_dir, package_test_flit):
|
|
||||||
mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
|
|
||||||
|
|
||||||
builder = build.ProjectBuilder(package_test_flit)
|
|
||||||
builder._hook.build_sdist.return_value = 'dist.tar.gz'
|
|
||||||
|
|
||||||
builder.build('sdist', '.')
|
|
||||||
|
|
||||||
builder._hook.build_sdist.assert_called_with(os.path.abspath('.'), None)
|
|
||||||
|
|
||||||
|
|
||||||
def test_log(mocker, caplog, package_test_flit):
|
def test_log(mocker, caplog, package_test_flit):
|
||||||
mocker.patch('pep517.wrappers.Pep517HookCaller', autospec=True)
|
mocker.patch('pyproject_hooks.BuildBackendHookCaller', autospec=True)
|
||||||
mocker.patch('build.ProjectBuilder._call_backend', return_value='some_path')
|
mocker.patch('build.ProjectBuilder._call_backend', return_value='some_path')
|
||||||
caplog.set_level(logging.DEBUG)
|
caplog.set_level(logging.DEBUG)
|
||||||
|
|
||||||
|
@ -609,8 +556,6 @@ def test_log(mocker, caplog, package_test_flit):
|
||||||
('INFO', 'Building wheel...'),
|
('INFO', 'Building wheel...'),
|
||||||
('INFO', 'something'),
|
('INFO', 'something'),
|
||||||
]
|
]
|
||||||
if sys.version_info >= (3, 8): # stacklevel
|
|
||||||
assert caplog.records[-1].lineno == 602
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
|
|
|
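The recurring change in this file is mechanical: the old pep517 hook wrapper (pep517.wrappers.Pep517HookCaller) is replaced by pyproject_hooks.BuildBackendHookCaller, the ProjectBuilder keyword is now source_dir, and a custom runner takes extra_environ rather than env. For orientation, a minimal usage sketch consistent with these tests; the source path '.' and output directory 'dist' are placeholders, and pyproject_hooks.default_subprocess_runner is used only to illustrate the runner hand-off:

    import build
    import pyproject_hooks

    def runner(cmd, cwd=None, extra_environ=None):
        # same signature as dummy_runner in the tests; delegate to the default subprocess runner
        pyproject_hooks.default_subprocess_runner(cmd, cwd=cwd, extra_environ=extra_environ)

    builder = build.ProjectBuilder(source_dir='.', runner=runner)  # 'source_dir' replaces the old 'srcdir'
    wheel_path = builder.build('wheel', 'dist')                    # returns the path of the built wheel
    print(wheel_path)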
@@ -5,7 +5,7 @@ import sys
 import tarfile
 import zipfile

-from pathlib import Path
+from pathlib import Path, PurePosixPath

 import pytest

@@ -13,32 +13,41 @@ import pytest
 DIR = Path(__file__).parent.resolve()
 MAIN_DIR = DIR.parent


 sdist_files = {
+'.dockerignore',
+'.gitignore',
+'CHANGELOG.rst',
 'LICENSE',
 'PKG-INFO',
 'README.md',
+'docs/conf.py',
 'pyproject.toml',
-'setup.cfg',
-'setup.py',
-'src',
-'src/build',
-'src/build.egg-info',
-'src/build.egg-info/PKG-INFO',
-'src/build.egg-info/SOURCES.txt',
-'src/build.egg-info/dependency_links.txt',
-'src/build.egg-info/entry_points.txt',
-'src/build.egg-info/requires.txt',
-'src/build.egg-info/top_level.txt',
-'src/build/__init__.py',
-'src/build/__main__.py',
-'src/build/env.py',
 'src/build/py.typed',
-'src/build/util.py',
+'tests/constraints.txt',
+'tests/packages/test-cant-build-via-sdist/some-file-that-is-needed-for-build.txt',
+'tests/packages/test-no-project/empty.txt',
+'tox.ini',
 }

+sdist_patterns = {
+'docs/*.rst',
+'src/build/*.py',
+'tests/*.py',
+'tests/packages/*/*.py',
+'tests/packages/*/*/*.py',
+'tests/packages/*/pyproject.toml',
+'tests/packages/*/setup.*',
+}

+sdist_files |= {str(PurePosixPath(p.relative_to(MAIN_DIR))) for path in sdist_patterns for p in MAIN_DIR.glob(path)}

 wheel_files = {
 'build/__init__.py',
 'build/__main__.py',
+'build/_exceptions.py',
+'build/_importlib.py',
+'build/_util.py',
 'build/env.py',
 'build/py.typed',
 'build/util.py',
@@ -47,12 +56,11 @@ wheel_files = {
 'dist-info/RECORD',
 'dist-info/WHEEL',
 'dist-info/entry_points.txt',
-'dist-info/top_level.txt',
 }


+@pytest.mark.network
 def test_build_sdist(monkeypatch, tmpdir):

 monkeypatch.chdir(MAIN_DIR)

 subprocess.run(
@@ -65,19 +73,19 @@ def test_build_sdist(monkeypatch, tmpdir):
 str(tmpdir),
 ],
 check=True,
-).stdout
+)

 (sdist,) = tmpdir.visit('*.tar.gz')

 with tarfile.open(str(sdist), 'r:gz') as tar:
-simpler = {n.split('/', 1)[-1] for n in tar.getnames()[1:]}
+simpler = {n.split('/', 1)[-1] for n in tar.getnames()}

 assert simpler == sdist_files


+@pytest.mark.network
 @pytest.mark.parametrize('args', ((), ('--wheel',)), ids=('from_sdist', 'direct'))
 def test_build_wheel(monkeypatch, tmpdir, args):

 monkeypatch.chdir(MAIN_DIR)

 subprocess.run(
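The sdist check above no longer hard-codes every source file: the fixed names in sdist_files are extended by globbing the repository with sdist_patterns and normalising each match to a POSIX-style path relative to the repository root. A standalone sketch of that expansion, using the current working directory and a reduced pattern set as placeholders:

    from pathlib import Path, PurePosixPath

    repo_root = Path.cwd()                       # stands in for MAIN_DIR in the test
    patterns = {'src/build/*.py', 'docs/*.rst'}  # illustrative subset of sdist_patterns
    expected = {
        str(PurePosixPath(p.relative_to(repo_root)))  # same normalisation as the test
        for pattern in patterns
        for p in repo_root.glob(pattern)
    }
    print(sorted(expected))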
@@ -1,12 +1,14 @@
 # SPDX-License-Identifier: MIT

+import importlib.util

 import pytest

 import build.util


 @pytest.mark.pypy3323bug
-@pytest.mark.parametrize('isolated', [False, True])
+@pytest.mark.parametrize('isolated', [False, pytest.param(True, marks=[pytest.mark.network, pytest.mark.isolated])])
 def test_wheel_metadata(package_test_setuptools, isolated):
 metadata = build.util.project_wheel_metadata(package_test_setuptools, isolated)

@@ -14,13 +16,10 @@ def test_wheel_metadata(package_test_setuptools, isolated):
 assert metadata['version'] == '1.0.0'


+@pytest.mark.network
 @pytest.mark.pypy3323bug
 def test_wheel_metadata_isolation(package_test_flit):
-try:
+if importlib.util.find_spec('flit_core'):
-import flit_core # noqa: F401
-except ModuleNotFoundError:
-pass
-else:
 pytest.xfail('flit_core is available -- we want it missing!') # pragma: no cover

 metadata = build.util.project_wheel_metadata(package_test_flit)
@@ -35,6 +34,7 @@ def test_wheel_metadata_isolation(package_test_flit):
 build.util.project_wheel_metadata(package_test_flit, isolated=False)


+@pytest.mark.network
 @pytest.mark.pypy3323bug
 def test_with_get_requires(package_test_metadata):
 metadata = build.util.project_wheel_metadata(package_test_metadata)
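build.util.project_wheel_metadata, exercised by these tests, prepares the project's wheel metadata and returns it as a mapping; with isolated=True the work happens in an isolated build environment, which is why that case now carries the network marker. A minimal sketch, with '.' as a placeholder for a project source tree such as package_test_setuptools:

    import build.util

    # isolated=True would fetch the build requirements from an index first
    metadata = build.util.project_wheel_metadata('.', isolated=False)
    print(metadata['name'], metadata['version'])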
tox.ini
@@ -1,30 +1,30 @@
 [tox]
-envlist =
+requires =
+tox>=4.2
+virtualenv>=20.0.34
+env_list =
 fix
 type
 docs
 path
-{py311, py310, py39, py38, py37, py36, pypy37, pypy38, pypy39}{, -min}
+{py312, py311, py310, py39, py38, py37, pypy39, pypy38, pypy37}{, -min}
-isolated_build = true
 skip_missing_interpreters = true
-minversion = 3.14
-requires =
-virtualenv>=20.0.34

 [testenv]
 description =
 run test suite with {basepython}
-passenv =
+extras =
+test
+pass_env =
 LC_ALL
 PIP_*
 PYTEST_*
 TERM
-setenv =
+set_env =
 COVERAGE_FILE = {toxworkdir}/.coverage.{envname}
-TEST_STATUS_DIR = {envtmpdir}
 PYPY3323BUG = 1
-extras =
+PYTHONWARNDEFAULTENCODING = 1
-test
+TEST_STATUS_DIR = {envtmpdir}
 commands =
 pytest -ra --cov --cov-config pyproject.toml \
 --cov-report=html:{envdir}/htmlcov --cov-context=test \
@@ -32,49 +32,52 @@ commands =

 [testenv:fix]
 description = run static analysis and style checks
-passenv =
+base_python = python3.9
-HOMEPATH
-PROGRAMDATA
-basepython = python3.9
 skip_install = true
 deps =
 pre-commit>=2
+pass_env =
+HOMEPATH
+PROGRAMDATA
 commands =
 pre-commit run --all-files --show-diff-on-failure
 python -c 'print("hint: run {envdir}/bin/pre-commit install to add checks as pre-commit hook")'

-[testenv:path]
-description = verify build can run from source (bootstrap)
-setenv =
-PYTHONPATH = {toxinidir}/src
-COVERAGE_FILE = {toxworkdir}/.coverage.{envname}
-commands_pre =
-python -E -m pip uninstall -y build colorama

 [testenv:type]
 description = run type check on code base
-extras = typing
+extras =
+typing
+set_env =
+PYTHONWARNDEFAULTENCODING =
 commands =
 mypy

-[testenv:{py311, py310, py39, py38, py37, py36, pypy37, pypy38, pypy39}-min]
-description = check minimum versions required of all dependencies
-skip_install = true
-commands_pre =
-pip install .[test] -c tests/constraints.txt

 [testenv:docs]
 description = build documentations
-basepython = python3.8
+base_python = python3.10
 extras =
 docs
 commands =
 sphinx-build -n docs {envtmpdir} {posargs:-W}
 python -c 'print("Documentation available under file://{envtmpdir}/index.html")'

+[testenv:path]
+description = verify build can run from source (bootstrap)
+set_env =
+COVERAGE_FILE = {toxworkdir}/.coverage.{envname}
+PYTHONPATH = {toxinidir}/src
+commands_pre =
+python -E -m pip uninstall -y build colorama

+[testenv:{py312, py311, py310, py39, py38, py37, pypy37, pypy38, pypy39}-min]
+description = check minimum versions required of all dependencies
+skip_install = true
+commands_pre =
+pip install .[test] -c tests/constraints.txt

 [testenv:dev]
 description = generate a DEV environment
-usedevelop = true
+package = editable
 deps =
 virtualenv>=20.0.34
 extras =
@@ -86,24 +89,20 @@ commands =

 [testenv:coverage]
 description = combine coverage from test environments
-passenv =
-DIFF_AGAINST
-setenv =
 skip_install = true
 deps =
 coverage[toml]>=5.1
 diff_cover>=3
 parallel_show_output = true
+pass_env =
+DIFF_AGAINST
+set_env =
 commands =
 coverage combine {toxworkdir}
 coverage report --skip-covered --show-missing -i
 coverage xml -o {toxworkdir}/coverage.xml -i
 coverage html -d {toxworkdir}/htmlcov -i
 python -m diff_cover.diff_cover_tool --compare-branch {env:DIFF_AGAINST:origin/main} {toxworkdir}/coverage.xml
-depends = {py311, py310, py39, py38, py37, py36, pypy37, pypy38, pypy39}{,-min}, path
+depends =
+path
-[flake8]
+{py312, py311, py310, py39, py38, py37, pypy39, pypy38, pypy37}{, -min}
-max-line-length = 127
-max-complexity = 10
-extend-ignore = E203
-extend-select = B9