forked from openkylin/astroid
Compare commits
No commits in common. "openkylin/yangtze" and "pristine-tar" have entirely different histories.
openkylin/
...
pristine-t
|
@ -1,82 +0,0 @@
|
|||
[
|
||||
{
|
||||
"mails": [
|
||||
"cpopa@cloudbasesolutions.com",
|
||||
"pcmanticore@gmail.com"
|
||||
],
|
||||
"authoritative_mail": "pcmanticore@gmail.com",
|
||||
"name": "Claudiu Popa"
|
||||
},
|
||||
{
|
||||
"mails": [
|
||||
"pierre.sassoulas@gmail.com",
|
||||
"pierre.sassoulas@cea.fr"
|
||||
],
|
||||
"authoritative_mail": "pierre.sassoulas@gmail.com",
|
||||
"name": "Pierre Sassoulas"
|
||||
},
|
||||
{
|
||||
"mails": [
|
||||
"alexandre.fayolle@logilab.fr",
|
||||
"emile.anclin@logilab.fr",
|
||||
"david.douard@logilab.fr",
|
||||
"laura.medioni@logilab.fr",
|
||||
"anthony.truchet@logilab.fr",
|
||||
"alain.leufroy@logilab.fr",
|
||||
"julien.cristau@logilab.fr",
|
||||
"Adrien.DiMascio@logilab.fr",
|
||||
"emile@crater.logilab.fr",
|
||||
"sylvain.thenault@logilab.fr",
|
||||
"pierre-yves.david@logilab.fr",
|
||||
"nicolas.chauvat@logilab.fr",
|
||||
"afayolle.ml@free.fr",
|
||||
"aurelien.campeas@logilab.fr",
|
||||
"lmedioni@logilab.fr"
|
||||
],
|
||||
"authoritative_mail": "contact@logilab.fr",
|
||||
"name": "LOGILAB S.A. (Paris, FRANCE)"
|
||||
},
|
||||
{
|
||||
"mails": [
|
||||
"moylop260@vauxoo.com"
|
||||
],
|
||||
"name": "Moises Lopez",
|
||||
"authoritative_mail": "moylop260@vauxoo.com"
|
||||
},
|
||||
{
|
||||
"mails": [
|
||||
"nathaniel@google.com",
|
||||
"mbp@google.com",
|
||||
"tmarek@google.com",
|
||||
"shlomme@gmail.com",
|
||||
"balparda@google.com",
|
||||
"dlindquist@google.com"
|
||||
],
|
||||
"name": "Google, Inc."
|
||||
},
|
||||
{
|
||||
"mails": [
|
||||
"ashley@awhetter.co.uk",
|
||||
"awhetter.2011@my.bristol.ac.uk",
|
||||
"asw@dneg.com",
|
||||
"AWhetter@users.noreply.github.com"
|
||||
],
|
||||
"name": "Ashley Whetter",
|
||||
"authoritative_mail": "ashley@awhetter.co.uk"
|
||||
},
|
||||
{
|
||||
"mails": [
|
||||
"ville.skytta@iki.fi",
|
||||
"ville.skytta@upcloud.com"
|
||||
],
|
||||
"authoritative_mail": "ville.skytta@iki.fi",
|
||||
"name": "Ville Skyttä"
|
||||
},
|
||||
{
|
||||
"mails": [
|
||||
"66853113+pre-commit-ci[bot]@users.noreply.github.com"
|
||||
],
|
||||
"authoritative_mail": "bot@noreply.github.com",
|
||||
"name": "pre-commit-ci[bot]"
|
||||
}
|
||||
]
|
19
.coveragerc
19
.coveragerc
|
@ -1,19 +0,0 @@
|
|||
[paths]
|
||||
source =
|
||||
astroid
|
||||
|
||||
[report]
|
||||
include =
|
||||
astroid/*
|
||||
omit =
|
||||
*/tests/*
|
||||
exclude_lines =
|
||||
# Re-enable default pragma
|
||||
pragma: no cover
|
||||
|
||||
# Debug-only code
|
||||
def __repr__
|
||||
|
||||
# Type checking code not executed during pytest runs
|
||||
if TYPE_CHECKING:
|
||||
@overload
|
4
.flake8
4
.flake8
|
@ -1,4 +0,0 @@
|
|||
[flake8]
|
||||
extend-ignore = E203,E266,E501,C901,F401
|
||||
max-complexity = 20
|
||||
select = B,C,E,F,W,T4,B9
|
|
@ -1,3 +0,0 @@
|
|||
# These are supported funding model platforms
|
||||
|
||||
tidelift: "pypi/astroid"
|
|
@ -1,11 +0,0 @@
|
|||
### Steps to reproduce
|
||||
|
||||
1.
|
||||
2.
|
||||
3.
|
||||
|
||||
### Current behavior
|
||||
|
||||
### Expected behavior
|
||||
|
||||
### `python -c "from astroid import __pkginfo__; print(__pkginfo__.version)"` output
|
|
@ -1,34 +0,0 @@
|
|||
<!--
|
||||
|
||||
Thank you for submitting a PR to astroid!
|
||||
|
||||
To ease our work reviewing your PR, do make sure to mark the complete the following boxes.
|
||||
|
||||
-->
|
||||
|
||||
## Steps
|
||||
|
||||
- [ ] For new features or bug fixes, add a ChangeLog entry describing what your PR does.
|
||||
- [ ] Write a good description on what the PR does.
|
||||
|
||||
## Description
|
||||
|
||||
## Type of Changes
|
||||
|
||||
<!-- Leave the corresponding lines for the applicable type of change: -->
|
||||
|
||||
| | Type |
|
||||
| --- | ---------------------- |
|
||||
| ✓ | :bug: Bug fix |
|
||||
| ✓ | :sparkles: New feature |
|
||||
| ✓ | :hammer: Refactoring |
|
||||
| ✓ | :scroll: Docs |
|
||||
|
||||
## Related Issue
|
||||
|
||||
<!--
|
||||
If this PR fixes a particular issue, use the following to automatically close that issue
|
||||
once this PR gets merged:
|
||||
|
||||
Closes #XXX
|
||||
-->
|
|
@ -1 +0,0 @@
|
|||
Coordinated Disclosure Plan: https://tidelift.com/security
|
|
@ -1,399 +0,0 @@
|
|||
name: CI
|
||||
|
||||
on:
|
||||
push:
|
||||
branches:
|
||||
- main
|
||||
- 2.*
|
||||
pull_request: ~
|
||||
|
||||
env:
|
||||
CACHE_VERSION: 4
|
||||
DEFAULT_PYTHON: 3.8
|
||||
PRE_COMMIT_CACHE: ~/.cache/pre-commit
|
||||
|
||||
jobs:
|
||||
prepare-base:
|
||||
name: Prepare base dependencies
|
||||
runs-on: ubuntu-latest
|
||||
outputs:
|
||||
python-key: ${{ steps.generate-python-key.outputs.key }}
|
||||
pre-commit-key: ${{ steps.generate-pre-commit-key.outputs.key }}
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v2.3.4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v2.2.1
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
- name: Generate partial Python venv restore key
|
||||
id: generate-python-key
|
||||
run: >-
|
||||
echo "::set-output name=key::base-venv-${{ env.CACHE_VERSION }}-${{
|
||||
hashFiles('setup.cfg', 'requirements_test.txt', 'requirements_test_min.txt',
|
||||
'requirements_test_brain.txt', 'requirements_test_pre_commit.txt') }}"
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@v2.1.4
|
||||
with:
|
||||
path: venv
|
||||
key: >-
|
||||
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
|
||||
steps.generate-python-key.outputs.key }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-${{ steps.python.outputs.python-version }}-base-venv-${{ env.CACHE_VERSION }}-
|
||||
- name: Create Python virtual environment
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
python -m venv venv
|
||||
. venv/bin/activate
|
||||
python -m pip install -U pip setuptools wheel
|
||||
pip install -U -r requirements_test.txt -r requirements_test_brain.txt
|
||||
pip install -e .
|
||||
- name: Generate pre-commit restore key
|
||||
id: generate-pre-commit-key
|
||||
run: >-
|
||||
echo "::set-output name=key::pre-commit-${{ env.CACHE_VERSION }}-${{
|
||||
hashFiles('.pre-commit-config.yaml') }}"
|
||||
- name: Restore pre-commit environment
|
||||
id: cache-precommit
|
||||
uses: actions/cache@v2.1.4
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||
key: >-
|
||||
${{ runner.os }}-${{ steps.generate-pre-commit-key.outputs.key }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pre-commit-${{ env.CACHE_VERSION }}-
|
||||
- name: Install pre-commit dependencies
|
||||
if: steps.cache-precommit.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
pre-commit install --install-hooks
|
||||
|
||||
formatting:
|
||||
name: Run pre-commit checks
|
||||
runs-on: ubuntu-latest
|
||||
needs: prepare-base
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v2.3.4
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v2.2.1
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@v2.1.4
|
||||
with:
|
||||
path: venv
|
||||
key:
|
||||
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
|
||||
needs.prepare-base.outputs.python-key }}
|
||||
- name: Fail job if Python cache restore failed
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
echo "Failed to restore Python venv from cache"
|
||||
exit 1
|
||||
- name: Restore pre-commit environment
|
||||
id: cache-precommit
|
||||
uses: actions/cache@v2.1.4
|
||||
with:
|
||||
path: ${{ env.PRE_COMMIT_CACHE }}
|
||||
key: ${{ runner.os }}-${{ needs.prepare-base.outputs.pre-commit-key }}
|
||||
- name: Fail job if pre-commit cache restore failed
|
||||
if: steps.cache-precommit.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
echo "Failed to restore pre-commit environment from cache"
|
||||
exit 1
|
||||
- name: Run formatting check
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
pip install -e .
|
||||
pre-commit run pylint --all-files
|
||||
|
||||
prepare-tests-linux:
|
||||
name: Prepare tests for Python ${{ matrix.python-version }} (Linux)
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: [3.6, 3.7, 3.8, 3.9, "3.10"]
|
||||
outputs:
|
||||
python-key: ${{ steps.generate-python-key.outputs.key }}
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v2.3.4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v2.2.1
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Generate partial Python venv restore key
|
||||
id: generate-python-key
|
||||
run: >-
|
||||
echo "::set-output name=key::venv-${{ env.CACHE_VERSION }}-${{
|
||||
hashFiles('setup.cfg', 'requirements_test.txt', 'requirements_test_min.txt',
|
||||
'requirements_test_brain.txt') }}"
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@v2.1.4
|
||||
with:
|
||||
path: venv
|
||||
key: >-
|
||||
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
|
||||
steps.generate-python-key.outputs.key }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{ env.CACHE_VERSION }}-
|
||||
- name: Create Python virtual environment
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
python -m venv venv
|
||||
. venv/bin/activate
|
||||
python -m pip install -U pip setuptools wheel
|
||||
pip install -U -r requirements_test.txt -r requirements_test_brain.txt
|
||||
pip install -e .
|
||||
|
||||
pytest-linux:
|
||||
name: Run tests Python ${{ matrix.python-version }} (Linux)
|
||||
runs-on: ubuntu-latest
|
||||
needs: prepare-tests-linux
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
python-version: [3.6, 3.7, 3.8, 3.9, "3.10"]
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v2.3.4
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v2.2.1
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@v2.1.4
|
||||
with:
|
||||
path: venv
|
||||
key:
|
||||
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
|
||||
needs.prepare-tests-linux.outputs.python-key }}
|
||||
- name: Fail job if Python cache restore failed
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
echo "Failed to restore Python venv from cache"
|
||||
exit 1
|
||||
- name: Run pytest
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
pytest --cov --cov-report= tests/
|
||||
- name: Upload coverage artifact
|
||||
uses: actions/upload-artifact@v2.2.3
|
||||
with:
|
||||
name: coverage-${{ matrix.python-version }}
|
||||
path: .coverage
|
||||
|
||||
coverage:
|
||||
name: Process test coverage
|
||||
runs-on: ubuntu-latest
|
||||
needs: ["prepare-tests-linux", "pytest-linux"]
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: [3.8]
|
||||
env:
|
||||
COVERAGERC_FILE: .coveragerc
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v2.3.4
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v2.2.1
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@v2.1.4
|
||||
with:
|
||||
path: venv
|
||||
key:
|
||||
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
|
||||
needs.prepare-tests-linux.outputs.python-key }}
|
||||
- name: Fail job if Python cache restore failed
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
echo "Failed to restore Python venv from cache"
|
||||
exit 1
|
||||
- name: Download all coverage artifacts
|
||||
uses: actions/download-artifact@v2.0.9
|
||||
- name: Combine coverage results
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
coverage combine coverage*/.coverage
|
||||
coverage report --rcfile=${{ env.COVERAGERC_FILE }}
|
||||
- name: Upload coverage to Coveralls
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
coveralls --rcfile=${{ env.COVERAGERC_FILE }} --service=github
|
||||
|
||||
prepare-tests-windows:
|
||||
name: Prepare tests for Python ${{ matrix.python-version }} (Windows)
|
||||
runs-on: windows-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: [3.6, 3.7, 3.8, 3.9, "3.10"]
|
||||
outputs:
|
||||
python-key: ${{ steps.generate-python-key.outputs.key }}
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v2.3.4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v2.2.1
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Generate partial Python venv restore key
|
||||
id: generate-python-key
|
||||
run: >-
|
||||
echo "::set-output name=key::venv-${{ env.CACHE_VERSION }}-${{
|
||||
hashFiles('setup.cfg', 'requirements_test_min.txt',
|
||||
'requirements_test_brain.txt') }}"
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@v2.1.4
|
||||
with:
|
||||
path: venv
|
||||
key: >-
|
||||
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
|
||||
steps.generate-python-key.outputs.key }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{ env.CACHE_VERSION }}-
|
||||
- name: Create Python virtual environment
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
python -m venv venv
|
||||
. venv\\Scripts\\activate
|
||||
python -m pip install -U pip setuptools wheel
|
||||
pip install -U -r requirements_test_min.txt -r requirements_test_brain.txt
|
||||
pip install -e .
|
||||
|
||||
pytest-windows:
|
||||
name: Run tests Python ${{ matrix.python-version }} (Windows)
|
||||
runs-on: windows-latest
|
||||
needs: prepare-tests-windows
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
python-version: [3.6, 3.7, 3.8, 3.9, "3.10"]
|
||||
steps:
|
||||
- name: Set temp directory
|
||||
run: echo "TEMP=$env:USERPROFILE\AppData\Local\Temp" >> $env:GITHUB_ENV
|
||||
# Workaround to set correct temp directory on Windows
|
||||
# https://github.com/actions/virtual-environments/issues/712
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v2.3.4
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v2.2.1
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@v2.1.4
|
||||
with:
|
||||
path: venv
|
||||
key:
|
||||
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
|
||||
needs.prepare-tests-windows.outputs.python-key }}
|
||||
- name: Fail job if Python cache restore failed
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
echo "Failed to restore Python venv from cache"
|
||||
exit 1
|
||||
- name: Run pytest
|
||||
run: |
|
||||
. venv\\Scripts\\activate
|
||||
pytest tests/
|
||||
|
||||
prepare-tests-pypy:
|
||||
name: Prepare tests for Python ${{ matrix.python-version }}
|
||||
runs-on: ubuntu-latest
|
||||
strategy:
|
||||
matrix:
|
||||
python-version: ["pypy3"]
|
||||
outputs:
|
||||
python-key: ${{ steps.generate-python-key.outputs.key }}
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v2.3.4
|
||||
with:
|
||||
fetch-depth: 0
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v2.2.1
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Generate partial Python venv restore key
|
||||
id: generate-python-key
|
||||
run: >-
|
||||
echo "::set-output name=key::venv-${{ env.CACHE_VERSION }}-${{
|
||||
hashFiles('setup.cfg', 'requirements_test_min.txt') }}"
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@v2.1.4
|
||||
with:
|
||||
path: venv
|
||||
key: >-
|
||||
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
|
||||
steps.generate-python-key.outputs.key }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-${{ steps.python.outputs.python-version }}-venv-${{ env.CACHE_VERSION }}-
|
||||
- name: Create Python virtual environment
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
python -m venv venv
|
||||
. venv/bin/activate
|
||||
python -m pip install -U pip setuptools wheel
|
||||
pip install -U -r requirements_test_min.txt
|
||||
pip install -e .
|
||||
|
||||
pytest-pypy:
|
||||
name: Run tests Python ${{ matrix.python-version }}
|
||||
runs-on: ubuntu-latest
|
||||
needs: prepare-tests-pypy
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
python-version: ["pypy3"]
|
||||
steps:
|
||||
- name: Check out code from GitHub
|
||||
uses: actions/checkout@v2.3.4
|
||||
- name: Set up Python ${{ matrix.python-version }}
|
||||
id: python
|
||||
uses: actions/setup-python@v2.2.1
|
||||
with:
|
||||
python-version: ${{ matrix.python-version }}
|
||||
- name: Restore Python virtual environment
|
||||
id: cache-venv
|
||||
uses: actions/cache@v2.1.4
|
||||
with:
|
||||
path: venv
|
||||
key:
|
||||
${{ runner.os }}-${{ steps.python.outputs.python-version }}-${{
|
||||
needs.prepare-tests-pypy.outputs.python-key }}
|
||||
- name: Fail job if Python cache restore failed
|
||||
if: steps.cache-venv.outputs.cache-hit != 'true'
|
||||
run: |
|
||||
echo "Failed to restore Python venv from cache"
|
||||
exit 1
|
||||
- name: Run pytest
|
||||
run: |
|
||||
. venv/bin/activate
|
||||
pytest tests/
|
|
@ -1,71 +0,0 @@
|
|||
# For most projects, this workflow file will not need changing; you simply need
|
||||
# to commit it to your repository.
|
||||
#
|
||||
# You may wish to alter this file to override the set of languages analyzed,
|
||||
# or to provide custom queries or build logic.
|
||||
#
|
||||
# ******** NOTE ********
|
||||
# We have attempted to detect the languages in your repository. Please check
|
||||
# the `language` matrix defined below to confirm you have the correct set of
|
||||
# supported CodeQL languages.
|
||||
#
|
||||
name: "CodeQL"
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
pull_request:
|
||||
# The branches below must be a subset of the branches above
|
||||
branches: [main]
|
||||
schedule:
|
||||
- cron: "30 21 * * 2"
|
||||
|
||||
jobs:
|
||||
analyze:
|
||||
name: Analyze
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
actions: read
|
||||
contents: read
|
||||
security-events: write
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
language: ["python"]
|
||||
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
|
||||
# Learn more:
|
||||
# https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
|
||||
# Initializes the CodeQL tools for scanning.
|
||||
- name: Initialize CodeQL
|
||||
uses: github/codeql-action/init@v1
|
||||
with:
|
||||
languages: ${{ matrix.language }}
|
||||
# If you wish to specify custom queries, you can do so here or in a config file.
|
||||
# By default, queries listed here will override any specified in a config file.
|
||||
# Prefix the list here with "+" to use these queries and those in the config file.
|
||||
# queries: ./path/to/local/query, your-org/your-repo/queries@main
|
||||
|
||||
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
|
||||
# If this step fails, then you should remove it and run the build manually (see below)
|
||||
- name: Autobuild
|
||||
uses: github/codeql-action/autobuild@v1
|
||||
|
||||
# ℹ️ Command-line programs to run using the OS shell.
|
||||
# 📚 https://git.io/JvXDl
|
||||
|
||||
# ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
|
||||
# and modify them (or add more) to build your code if your project
|
||||
# uses a compiled language
|
||||
|
||||
#- run: |
|
||||
# make bootstrap
|
||||
# make release
|
||||
|
||||
- name: Perform CodeQL Analysis
|
||||
uses: github/codeql-action/analyze@v1
|
|
@ -1,37 +0,0 @@
|
|||
name: Release
|
||||
|
||||
on:
|
||||
release:
|
||||
types:
|
||||
- published
|
||||
|
||||
env:
|
||||
DEFAULT_PYTHON: 3.9
|
||||
|
||||
jobs:
|
||||
release-pypi:
|
||||
name: Upload release to PyPI
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out code from Github
|
||||
uses: actions/checkout@v2.3.4
|
||||
- name: Set up Python ${{ env.DEFAULT_PYTHON }}
|
||||
id: python
|
||||
uses: actions/setup-python@v2.2.2
|
||||
with:
|
||||
python-version: ${{ env.DEFAULT_PYTHON }}
|
||||
- name: Install requirements
|
||||
run: |
|
||||
python -m pip install -U pip twine wheel
|
||||
python -m pip install -U "setuptools>=56.0.0"
|
||||
- name: Build distributions
|
||||
run: |
|
||||
python setup.py sdist bdist_wheel
|
||||
- name: Upload to PyPI
|
||||
if: github.event_name == 'release' && startsWith(github.ref, 'refs/tags')
|
||||
env:
|
||||
TWINE_REPOSITORY: pypi
|
||||
TWINE_USERNAME: __token__
|
||||
TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }}
|
||||
run: |
|
||||
twine upload --verbose dist/*
|
|
@ -1,17 +0,0 @@
|
|||
.svn/
|
||||
.hg/
|
||||
.hgtags/
|
||||
*.py[cod]
|
||||
log
|
||||
build
|
||||
dist/
|
||||
astroid.egg-info/
|
||||
.idea
|
||||
.tox
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache/
|
||||
.eggs/
|
||||
.pytest_cache/
|
||||
.mypy_cache/
|
||||
venv
|
|
@ -1,88 +0,0 @@
|
|||
ci:
|
||||
skip: [pylint]
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.1.0
|
||||
hooks:
|
||||
- id: trailing-whitespace
|
||||
exclude: .github/|tests/testdata
|
||||
- id: end-of-file-fixer
|
||||
exclude: tests/testdata
|
||||
- repo: https://github.com/myint/autoflake
|
||||
rev: v1.4
|
||||
hooks:
|
||||
- id: autoflake
|
||||
exclude: tests/testdata|astroid/__init__.py|astroid/scoped_nodes.py|astroid/node_classes.py
|
||||
args:
|
||||
- --in-place
|
||||
- --remove-all-unused-imports
|
||||
- --expand-star-imports
|
||||
- --remove-duplicate-keys
|
||||
- --remove-unused-variables
|
||||
- repo: https://github.com/asottile/pyupgrade
|
||||
rev: v2.31.0
|
||||
hooks:
|
||||
- id: pyupgrade
|
||||
exclude: tests/testdata
|
||||
args: [--py36-plus]
|
||||
- repo: https://github.com/PyCQA/isort
|
||||
rev: 5.10.1
|
||||
hooks:
|
||||
- id: isort
|
||||
exclude: tests/testdata
|
||||
- repo: https://github.com/Pierre-Sassoulas/black-disable-checker/
|
||||
rev: 1.0.1
|
||||
hooks:
|
||||
- id: black-disable-checker
|
||||
exclude: tests/unittest_nodes_lineno.py
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 21.12b0
|
||||
hooks:
|
||||
- id: black
|
||||
args: [--safe, --quiet]
|
||||
exclude: tests/testdata
|
||||
- repo: https://github.com/PyCQA/flake8
|
||||
rev: 4.0.1
|
||||
hooks:
|
||||
- id: flake8
|
||||
additional_dependencies: [flake8-bugbear, flake8-typing-imports==1.11.0]
|
||||
exclude: tests/testdata|doc/conf.py|astroid/__init__.py
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: pylint
|
||||
name: pylint
|
||||
entry: pylint
|
||||
language: system
|
||||
types: [python]
|
||||
args: [
|
||||
"-rn",
|
||||
"-sn",
|
||||
"--rcfile=pylintrc",
|
||||
# "--load-plugins=pylint.extensions.docparams", We're not ready for that
|
||||
]
|
||||
exclude: tests/testdata|conf.py
|
||||
- repo: https://github.com/pre-commit/mirrors-mypy
|
||||
rev: v0.930
|
||||
hooks:
|
||||
- id: mypy
|
||||
name: mypy
|
||||
entry: mypy
|
||||
language: python
|
||||
types: [python]
|
||||
args: []
|
||||
require_serial: true
|
||||
additional_dependencies:
|
||||
[
|
||||
"types-pkg_resources==0.1.2",
|
||||
"types-six",
|
||||
"types-attrs",
|
||||
"types-python-dateutil",
|
||||
"types-typed-ast",
|
||||
]
|
||||
exclude: tests/testdata| # exclude everything, we're not ready
|
||||
- repo: https://github.com/pre-commit/mirrors-prettier
|
||||
rev: v2.5.1
|
||||
hooks:
|
||||
- id: prettier
|
||||
args: [--prose-wrap=always, --print-width=88]
|
|
@ -1,11 +0,0 @@
|
|||
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
|
||||
|
||||
version: 2
|
||||
|
||||
sphinx:
|
||||
configuration: doc/conf.py
|
||||
|
||||
python:
|
||||
version: 3.7
|
||||
install:
|
||||
- requirements: doc/requirements.txt
|
508
LICENSE
508
LICENSE
|
@ -1,508 +0,0 @@
|
|||
|
||||
GNU LESSER GENERAL PUBLIC LICENSE
|
||||
Version 2.1, February 1999
|
||||
|
||||
Copyright (C) 1991, 1999 Free Software Foundation, Inc.
|
||||
51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
Everyone is permitted to copy and distribute verbatim copies
|
||||
of this license document, but changing it is not allowed.
|
||||
|
||||
[This is the first released version of the Lesser GPL. It also counts
|
||||
as the successor of the GNU Library Public License, version 2, hence
|
||||
the version number 2.1.]
|
||||
|
||||
Preamble
|
||||
|
||||
The licenses for most software are designed to take away your
|
||||
freedom to share and change it. By contrast, the GNU General Public
|
||||
Licenses are intended to guarantee your freedom to share and change
|
||||
free software--to make sure the software is free for all its users.
|
||||
|
||||
This license, the Lesser General Public License, applies to some
|
||||
specially designated software packages--typically libraries--of the
|
||||
Free Software Foundation and other authors who decide to use it. You
|
||||
can use it too, but we suggest you first think carefully about whether
|
||||
this license or the ordinary General Public License is the better
|
||||
strategy to use in any particular case, based on the explanations
|
||||
below.
|
||||
|
||||
When we speak of free software, we are referring to freedom of use,
|
||||
not price. Our General Public Licenses are designed to make sure that
|
||||
you have the freedom to distribute copies of free software (and charge
|
||||
for this service if you wish); that you receive source code or can get
|
||||
it if you want it; that you can change the software and use pieces of
|
||||
it in new free programs; and that you are informed that you can do
|
||||
these things.
|
||||
|
||||
To protect your rights, we need to make restrictions that forbid
|
||||
distributors to deny you these rights or to ask you to surrender these
|
||||
rights. These restrictions translate to certain responsibilities for
|
||||
you if you distribute copies of the library or if you modify it.
|
||||
|
||||
For example, if you distribute copies of the library, whether gratis
|
||||
or for a fee, you must give the recipients all the rights that we gave
|
||||
you. You must make sure that they, too, receive or can get the source
|
||||
code. If you link other code with the library, you must provide
|
||||
complete object files to the recipients, so that they can relink them
|
||||
with the library after making changes to the library and recompiling
|
||||
it. And you must show them these terms so they know their rights.
|
||||
|
||||
We protect your rights with a two-step method: (1) we copyright the
|
||||
library, and (2) we offer you this license, which gives you legal
|
||||
permission to copy, distribute and/or modify the library.
|
||||
|
||||
To protect each distributor, we want to make it very clear that
|
||||
there is no warranty for the free library. Also, if the library is
|
||||
modified by someone else and passed on, the recipients should know
|
||||
that what they have is not the original version, so that the original
|
||||
author's reputation will not be affected by problems that might be
|
||||
introduced by others.
|
||||
|
||||
Finally, software patents pose a constant threat to the existence of
|
||||
any free program. We wish to make sure that a company cannot
|
||||
effectively restrict the users of a free program by obtaining a
|
||||
restrictive license from a patent holder. Therefore, we insist that
|
||||
any patent license obtained for a version of the library must be
|
||||
consistent with the full freedom of use specified in this license.
|
||||
|
||||
Most GNU software, including some libraries, is covered by the
|
||||
ordinary GNU General Public License. This license, the GNU Lesser
|
||||
General Public License, applies to certain designated libraries, and
|
||||
is quite different from the ordinary General Public License. We use
|
||||
this license for certain libraries in order to permit linking those
|
||||
libraries into non-free programs.
|
||||
|
||||
When a program is linked with a library, whether statically or using
|
||||
a shared library, the combination of the two is legally speaking a
|
||||
combined work, a derivative of the original library. The ordinary
|
||||
General Public License therefore permits such linking only if the
|
||||
entire combination fits its criteria of freedom. The Lesser General
|
||||
Public License permits more lax criteria for linking other code with
|
||||
the library.
|
||||
|
||||
We call this license the "Lesser" General Public License because it
|
||||
does Less to protect the user's freedom than the ordinary General
|
||||
Public License. It also provides other free software developers Less
|
||||
of an advantage over competing non-free programs. These disadvantages
|
||||
are the reason we use the ordinary General Public License for many
|
||||
libraries. However, the Lesser license provides advantages in certain
|
||||
special circumstances.
|
||||
|
||||
For example, on rare occasions, there may be a special need to
|
||||
encourage the widest possible use of a certain library, so that it
|
||||
becomes a de-facto standard. To achieve this, non-free programs must
|
||||
be allowed to use the library. A more frequent case is that a free
|
||||
library does the same job as widely used non-free libraries. In this
|
||||
case, there is little to gain by limiting the free library to free
|
||||
software only, so we use the Lesser General Public License.
|
||||
|
||||
In other cases, permission to use a particular library in non-free
|
||||
programs enables a greater number of people to use a large body of
|
||||
free software. For example, permission to use the GNU C Library in
|
||||
non-free programs enables many more people to use the whole GNU
|
||||
operating system, as well as its variant, the GNU/Linux operating
|
||||
system.
|
||||
|
||||
Although the Lesser General Public License is Less protective of the
|
||||
users' freedom, it does ensure that the user of a program that is
|
||||
linked with the Library has the freedom and the wherewithal to run
|
||||
that program using a modified version of the Library.
|
||||
|
||||
The precise terms and conditions for copying, distribution and
|
||||
modification follow. Pay close attention to the difference between a
|
||||
"work based on the library" and a "work that uses the library". The
|
||||
former contains code derived from the library, whereas the latter must
|
||||
be combined with the library in order to run.
|
||||
|
||||
GNU LESSER GENERAL PUBLIC LICENSE
|
||||
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
|
||||
|
||||
0. This License Agreement applies to any software library or other
|
||||
program which contains a notice placed by the copyright holder or
|
||||
other authorized party saying it may be distributed under the terms of
|
||||
this Lesser General Public License (also called "this License").
|
||||
Each licensee is addressed as "you".
|
||||
|
||||
A "library" means a collection of software functions and/or data
|
||||
prepared so as to be conveniently linked with application programs
|
||||
(which use some of those functions and data) to form executables.
|
||||
|
||||
The "Library", below, refers to any such software library or work
|
||||
which has been distributed under these terms. A "work based on the
|
||||
Library" means either the Library or any derivative work under
|
||||
copyright law: that is to say, a work containing the Library or a
|
||||
portion of it, either verbatim or with modifications and/or translated
|
||||
straightforwardly into another language. (Hereinafter, translation is
|
||||
included without limitation in the term "modification".)
|
||||
|
||||
"Source code" for a work means the preferred form of the work for
|
||||
making modifications to it. For a library, complete source code means
|
||||
all the source code for all modules it contains, plus any associated
|
||||
interface definition files, plus the scripts used to control
|
||||
compilation and installation of the library.
|
||||
|
||||
Activities other than copying, distribution and modification are not
|
||||
covered by this License; they are outside its scope. The act of
|
||||
running a program using the Library is not restricted, and output from
|
||||
such a program is covered only if its contents constitute a work based
|
||||
on the Library (independent of the use of the Library in a tool for
|
||||
writing it). Whether that is true depends on what the Library does
|
||||
and what the program that uses the Library does.
|
||||
|
||||
1. You may copy and distribute verbatim copies of the Library's
|
||||
complete source code as you receive it, in any medium, provided that
|
||||
you conspicuously and appropriately publish on each copy an
|
||||
appropriate copyright notice and disclaimer of warranty; keep intact
|
||||
all the notices that refer to this License and to the absence of any
|
||||
warranty; and distribute a copy of this License along with the
|
||||
Library.
|
||||
|
||||
You may charge a fee for the physical act of transferring a copy,
|
||||
and you may at your option offer warranty protection in exchange for a
|
||||
fee.
|
||||
|
||||
2. You may modify your copy or copies of the Library or any portion
|
||||
of it, thus forming a work based on the Library, and copy and
|
||||
distribute such modifications or work under the terms of Section 1
|
||||
above, provided that you also meet all of these conditions:
|
||||
|
||||
a) The modified work must itself be a software library.
|
||||
|
||||
b) You must cause the files modified to carry prominent notices
|
||||
stating that you changed the files and the date of any change.
|
||||
|
||||
c) You must cause the whole of the work to be licensed at no
|
||||
charge to all third parties under the terms of this License.
|
||||
|
||||
d) If a facility in the modified Library refers to a function or a
|
||||
table of data to be supplied by an application program that uses
|
||||
the facility, other than as an argument passed when the facility
|
||||
is invoked, then you must make a good faith effort to ensure that,
|
||||
in the event an application does not supply such function or
|
||||
table, the facility still operates, and performs whatever part of
|
||||
its purpose remains meaningful.
|
||||
|
||||
(For example, a function in a library to compute square roots has
|
||||
a purpose that is entirely well-defined independent of the
|
||||
application. Therefore, Subsection 2d requires that any
|
||||
application-supplied function or table used by this function must
|
||||
be optional: if the application does not supply it, the square
|
||||
root function must still compute square roots.)
|
||||
|
||||
These requirements apply to the modified work as a whole. If
|
||||
identifiable sections of that work are not derived from the Library,
|
||||
and can be reasonably considered independent and separate works in
|
||||
themselves, then this License, and its terms, do not apply to those
|
||||
sections when you distribute them as separate works. But when you
|
||||
distribute the same sections as part of a whole which is a work based
|
||||
on the Library, the distribution of the whole must be on the terms of
|
||||
this License, whose permissions for other licensees extend to the
|
||||
entire whole, and thus to each and every part regardless of who wrote
|
||||
it.
|
||||
|
||||
Thus, it is not the intent of this section to claim rights or contest
|
||||
your rights to work written entirely by you; rather, the intent is to
|
||||
exercise the right to control the distribution of derivative or
|
||||
collective works based on the Library.
|
||||
|
||||
In addition, mere aggregation of another work not based on the Library
|
||||
with the Library (or with a work based on the Library) on a volume of
|
||||
a storage or distribution medium does not bring the other work under
|
||||
the scope of this License.
|
||||
|
||||
3. You may opt to apply the terms of the ordinary GNU General Public
|
||||
License instead of this License to a given copy of the Library. To do
|
||||
this, you must alter all the notices that refer to this License, so
|
||||
that they refer to the ordinary GNU General Public License, version 2,
|
||||
instead of to this License. (If a newer version than version 2 of the
|
||||
ordinary GNU General Public License has appeared, then you can specify
|
||||
that version instead if you wish.) Do not make any other change in
|
||||
these notices.
|
||||
|
||||
Once this change is made in a given copy, it is irreversible for
|
||||
that copy, so the ordinary GNU General Public License applies to all
|
||||
subsequent copies and derivative works made from that copy.
|
||||
|
||||
This option is useful when you wish to copy part of the code of
|
||||
the Library into a program that is not a library.
|
||||
|
||||
4. You may copy and distribute the Library (or a portion or
|
||||
derivative of it, under Section 2) in object code or executable form
|
||||
under the terms of Sections 1 and 2 above provided that you accompany
|
||||
it with the complete corresponding machine-readable source code, which
|
||||
must be distributed under the terms of Sections 1 and 2 above on a
|
||||
medium customarily used for software interchange.
|
||||
|
||||
If distribution of object code is made by offering access to copy
|
||||
from a designated place, then offering equivalent access to copy the
|
||||
source code from the same place satisfies the requirement to
|
||||
distribute the source code, even though third parties are not
|
||||
compelled to copy the source along with the object code.
|
||||
|
||||
5. A program that contains no derivative of any portion of the
|
||||
Library, but is designed to work with the Library by being compiled or
|
||||
linked with it, is called a "work that uses the Library". Such a
|
||||
work, in isolation, is not a derivative work of the Library, and
|
||||
therefore falls outside the scope of this License.
|
||||
|
||||
However, linking a "work that uses the Library" with the Library
|
||||
creates an executable that is a derivative of the Library (because it
|
||||
contains portions of the Library), rather than a "work that uses the
|
||||
library". The executable is therefore covered by this License.
|
||||
Section 6 states terms for distribution of such executables.
|
||||
|
||||
When a "work that uses the Library" uses material from a header file
|
||||
that is part of the Library, the object code for the work may be a
|
||||
derivative work of the Library even though the source code is not.
|
||||
Whether this is true is especially significant if the work can be
|
||||
linked without the Library, or if the work is itself a library. The
|
||||
threshold for this to be true is not precisely defined by law.
|
||||
|
||||
If such an object file uses only numerical parameters, data
|
||||
structure layouts and accessors, and small macros and small inline
|
||||
functions (ten lines or less in length), then the use of the object
|
||||
file is unrestricted, regardless of whether it is legally a derivative
|
||||
work. (Executables containing this object code plus portions of the
|
||||
Library will still fall under Section 6.)
|
||||
|
||||
Otherwise, if the work is a derivative of the Library, you may
|
||||
distribute the object code for the work under the terms of Section 6.
|
||||
Any executables containing that work also fall under Section 6,
|
||||
whether or not they are linked directly with the Library itself.
|
||||
|
||||
6. As an exception to the Sections above, you may also combine or
|
||||
link a "work that uses the Library" with the Library to produce a
|
||||
work containing portions of the Library, and distribute that work
|
||||
under terms of your choice, provided that the terms permit
|
||||
modification of the work for the customer's own use and reverse
|
||||
engineering for debugging such modifications.
|
||||
|
||||
You must give prominent notice with each copy of the work that the
|
||||
Library is used in it and that the Library and its use are covered by
|
||||
this License. You must supply a copy of this License. If the work
|
||||
during execution displays copyright notices, you must include the
|
||||
copyright notice for the Library among them, as well as a reference
|
||||
directing the user to the copy of this License. Also, you must do one
|
||||
of these things:
|
||||
|
||||
a) Accompany the work with the complete corresponding
|
||||
machine-readable source code for the Library including whatever
|
||||
changes were used in the work (which must be distributed under
|
||||
Sections 1 and 2 above); and, if the work is an executable linked
|
||||
with the Library, with the complete machine-readable "work that
|
||||
uses the Library", as object code and/or source code, so that the
|
||||
user can modify the Library and then relink to produce a modified
|
||||
executable containing the modified Library. (It is understood
|
||||
that the user who changes the contents of definitions files in the
|
||||
Library will not necessarily be able to recompile the application
|
||||
to use the modified definitions.)
|
||||
|
||||
b) Use a suitable shared library mechanism for linking with the
|
||||
Library. A suitable mechanism is one that (1) uses at run time a
|
||||
copy of the library already present on the user's computer system,
|
||||
rather than copying library functions into the executable, and (2)
|
||||
will operate properly with a modified version of the library, if
|
||||
the user installs one, as long as the modified version is
|
||||
interface-compatible with the version that the work was made with.
|
||||
|
||||
c) Accompany the work with a written offer, valid for at least
|
||||
three years, to give the same user the materials specified in
|
||||
Subsection 6a, above, for a charge no more than the cost of
|
||||
performing this distribution.
|
||||
|
||||
d) If distribution of the work is made by offering access to copy
|
||||
from a designated place, offer equivalent access to copy the above
|
||||
specified materials from the same place.
|
||||
|
||||
e) Verify that the user has already received a copy of these
|
||||
materials or that you have already sent this user a copy.
|
||||
|
||||
For an executable, the required form of the "work that uses the
|
||||
Library" must include any data and utility programs needed for
|
||||
reproducing the executable from it. However, as a special exception,
|
||||
the materials to be distributed need not include anything that is
|
||||
normally distributed (in either source or binary form) with the major
|
||||
components (compiler, kernel, and so on) of the operating system on
|
||||
which the executable runs, unless that component itself accompanies
|
||||
the executable.
|
||||
|
||||
It may happen that this requirement contradicts the license
|
||||
restrictions of other proprietary libraries that do not normally
|
||||
accompany the operating system. Such a contradiction means you cannot
|
||||
use both them and the Library together in an executable that you
|
||||
distribute.
|
||||
|
||||
7. You may place library facilities that are a work based on the
|
||||
Library side-by-side in a single library together with other library
|
||||
facilities not covered by this License, and distribute such a combined
|
||||
library, provided that the separate distribution of the work based on
|
||||
the Library and of the other library facilities is otherwise
|
||||
permitted, and provided that you do these two things:
|
||||
|
||||
a) Accompany the combined library with a copy of the same work
|
||||
based on the Library, uncombined with any other library
|
||||
facilities. This must be distributed under the terms of the
|
||||
Sections above.
|
||||
|
||||
b) Give prominent notice with the combined library of the fact
|
||||
that part of it is a work based on the Library, and explaining
|
||||
where to find the accompanying uncombined form of the same work.
|
||||
|
||||
8. You may not copy, modify, sublicense, link with, or distribute
|
||||
the Library except as expressly provided under this License. Any
|
||||
attempt otherwise to copy, modify, sublicense, link with, or
|
||||
distribute the Library is void, and will automatically terminate your
|
||||
rights under this License. However, parties who have received copies,
|
||||
or rights, from you under this License will not have their licenses
|
||||
terminated so long as such parties remain in full compliance.
|
||||
|
||||
9. You are not required to accept this License, since you have not
|
||||
signed it. However, nothing else grants you permission to modify or
|
||||
distribute the Library or its derivative works. These actions are
|
||||
prohibited by law if you do not accept this License. Therefore, by
|
||||
modifying or distributing the Library (or any work based on the
|
||||
Library), you indicate your acceptance of this License to do so, and
|
||||
all its terms and conditions for copying, distributing or modifying
|
||||
the Library or works based on it.
|
||||
|
||||
10. Each time you redistribute the Library (or any work based on the
|
||||
Library), the recipient automatically receives a license from the
|
||||
original licensor to copy, distribute, link with or modify the Library
|
||||
subject to these terms and conditions. You may not impose any further
|
||||
restrictions on the recipients' exercise of the rights granted herein.
|
||||
You are not responsible for enforcing compliance by third parties with
|
||||
this License.
|
||||
|
||||
11. If, as a consequence of a court judgment or allegation of patent
|
||||
infringement or for any other reason (not limited to patent issues),
|
||||
conditions are imposed on you (whether by court order, agreement or
|
||||
otherwise) that contradict the conditions of this License, they do not
|
||||
excuse you from the conditions of this License. If you cannot
|
||||
distribute so as to satisfy simultaneously your obligations under this
|
||||
License and any other pertinent obligations, then as a consequence you
|
||||
may not distribute the Library at all. For example, if a patent
|
||||
license would not permit royalty-free redistribution of the Library by
|
||||
all those who receive copies directly or indirectly through you, then
|
||||
the only way you could satisfy both it and this License would be to
|
||||
refrain entirely from distribution of the Library.
|
||||
|
||||
If any portion of this section is held invalid or unenforceable under
|
||||
any particular circumstance, the balance of the section is intended to
|
||||
apply, and the section as a whole is intended to apply in other
|
||||
circumstances.
|
||||
|
||||
It is not the purpose of this section to induce you to infringe any
|
||||
patents or other property right claims or to contest validity of any
|
||||
such claims; this section has the sole purpose of protecting the
|
||||
integrity of the free software distribution system which is
|
||||
implemented by public license practices. Many people have made
|
||||
generous contributions to the wide range of software distributed
|
||||
through that system in reliance on consistent application of that
|
||||
system; it is up to the author/donor to decide if he or she is willing
|
||||
to distribute software through any other system and a licensee cannot
|
||||
impose that choice.
|
||||
|
||||
This section is intended to make thoroughly clear what is believed to
|
||||
be a consequence of the rest of this License.
|
||||
|
||||
12. If the distribution and/or use of the Library is restricted in
|
||||
certain countries either by patents or by copyrighted interfaces, the
|
||||
original copyright holder who places the Library under this License
|
||||
may add an explicit geographical distribution limitation excluding those
|
||||
countries, so that distribution is permitted only in or among
|
||||
countries not thus excluded. In such case, this License incorporates
|
||||
the limitation as if written in the body of this License.
|
||||
|
||||
13. The Free Software Foundation may publish revised and/or new
|
||||
versions of the Lesser General Public License from time to time.
|
||||
Such new versions will be similar in spirit to the present version,
|
||||
but may differ in detail to address new problems or concerns.
|
||||
|
||||
Each version is given a distinguishing version number. If the Library
|
||||
specifies a version number of this License which applies to it and
|
||||
"any later version", you have the option of following the terms and
|
||||
conditions either of that version or of any later version published by
|
||||
the Free Software Foundation. If the Library does not specify a
|
||||
license version number, you may choose any version ever published by
|
||||
the Free Software Foundation.
|
||||
|
||||
14. If you wish to incorporate parts of the Library into other free
|
||||
programs whose distribution conditions are incompatible with these,
|
||||
write to the author to ask for permission. For software which is
|
||||
copyrighted by the Free Software Foundation, write to the Free
|
||||
Software Foundation; we sometimes make exceptions for this. Our
|
||||
decision will be guided by the two goals of preserving the free status
|
||||
of all derivatives of our free software and of promoting the sharing
|
||||
and reuse of software generally.
|
||||
|
||||
NO WARRANTY
|
||||
|
||||
15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
|
||||
WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
|
||||
EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
|
||||
OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
|
||||
KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
|
||||
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
|
||||
LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
|
||||
THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||
|
||||
16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
|
||||
WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
|
||||
AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
|
||||
FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
|
||||
CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
|
||||
LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
|
||||
RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
|
||||
FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
|
||||
SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
|
||||
DAMAGES.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
How to Apply These Terms to Your New Libraries
|
||||
|
||||
If you develop a new library, and you want it to be of the greatest
|
||||
possible use to the public, we recommend making it free software that
|
||||
everyone can redistribute and change. You can do so by permitting
|
||||
redistribution under these terms (or, alternatively, under the terms
|
||||
of the ordinary General Public License).
|
||||
|
||||
To apply these terms, attach the following notices to the library.
|
||||
It is safest to attach them to the start of each source file to most
|
||||
effectively convey the exclusion of warranty; and each file should
|
||||
have at least the "copyright" line and a pointer to where the full
|
||||
notice is found.
|
||||
|
||||
|
||||
<one line to give the library's name and a brief idea of what it does.>
|
||||
Copyright (C) <year> <name of author>
|
||||
|
||||
This library is free software; you can redistribute it and/or
|
||||
modify it under the terms of the GNU Lesser General Public
|
||||
License as published by the Free Software Foundation; either
|
||||
version 2.1 of the License, or (at your option) any later version.
|
||||
|
||||
This library is distributed in the hope that it will be useful,
|
||||
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
Lesser General Public License for more details.
|
||||
|
||||
You should have received a copy of the GNU Lesser General Public
|
||||
License along with this library; if not, write to the Free Software
|
||||
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
|
||||
Also add information on how to contact you by electronic and paper mail.
|
||||
|
||||
You should also get your employer (if you work as a programmer) or
|
||||
your school, if any, to sign a "copyright disclaimer" for the library,
|
||||
if necessary. Here is a sample; alter the names:
|
||||
|
||||
Yoyodyne, Inc., hereby disclaims all copyright interest in the
|
||||
library `Frob' (a library for tweaking knobs) written by James
|
||||
Random Hacker.
|
||||
|
||||
<signature of Ty Coon>, 1 April 1990
|
||||
Ty Coon, President of Vice
|
||||
|
||||
That's all there is to it!
|
|
@ -1,9 +0,0 @@
|
|||
prune .github
|
||||
prune doc
|
||||
prune tests
|
||||
exclude .*
|
||||
exclude ChangeLog
|
||||
exclude pylintrc
|
||||
exclude README.rst
|
||||
exclude requirements_*.txt
|
||||
exclude tox.ini
|
90
README.rst
90
README.rst
|
@ -1,90 +0,0 @@
|
|||
Astroid
|
||||
=======
|
||||
|
||||
.. image:: https://coveralls.io/repos/github/PyCQA/astroid/badge.svg?branch=main
|
||||
:target: https://coveralls.io/github/PyCQA/astroid?branch=main
|
||||
:alt: Coverage badge from coveralls.io
|
||||
|
||||
.. image:: https://readthedocs.org/projects/astroid/badge/?version=latest
|
||||
:target: http://astroid.readthedocs.io/en/latest/?badge=latest
|
||||
:alt: Documentation Status
|
||||
|
||||
.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
|
||||
:target: https://github.com/ambv/black
|
||||
|
||||
.. image:: https://results.pre-commit.ci/badge/github/PyCQA/astroid/main.svg
|
||||
:target: https://results.pre-commit.ci/latest/github/PyCQA/astroid/main
|
||||
:alt: pre-commit.ci status
|
||||
|
||||
.. |tidelift_logo| image:: https://raw.githubusercontent.com/PyCQA/astroid/main/doc/media/Tidelift_Logos_RGB_Tidelift_Shorthand_On-White.png
|
||||
:width: 75
|
||||
:height: 60
|
||||
:alt: Tidelift
|
||||
|
||||
.. list-table::
|
||||
:widths: 10 100
|
||||
|
||||
* - |tidelift_logo|
|
||||
- Professional support for astroid is available as part of the
|
||||
`Tidelift Subscription`_. Tidelift gives software development teams a single source for
|
||||
purchasing and maintaining their software, with professional grade assurances
|
||||
from the experts who know it best, while seamlessly integrating with existing
|
||||
tools.
|
||||
|
||||
.. _Tidelift Subscription: https://tidelift.com/subscription/pkg/pypi-astroid?utm_source=pypi-astroid&utm_medium=referral&utm_campaign=readme
|
||||
|
||||
|
||||
|
||||
What's this?
|
||||
------------
|
||||
|
||||
The aim of this module is to provide a common base representation of
|
||||
python source code. It is currently the library powering pylint's capabilities.
|
||||
|
||||
It provides a compatible representation which comes from the `_ast`
|
||||
module. It rebuilds the tree generated by the builtin _ast module by
|
||||
recursively walking down the AST and building an extended ast. The new
|
||||
node classes have additional methods and attributes for different
|
||||
usages. They include some support for static inference and local name
|
||||
scopes. Furthermore, astroid can also build partial trees by inspecting living
|
||||
objects.
|
||||
|
||||
|
||||
Installation
|
||||
------------
|
||||
|
||||
Extract the tarball, jump into the created directory and run::
|
||||
|
||||
pip install .
|
||||
|
||||
|
||||
If you want to do an editable installation, you can run::
|
||||
|
||||
pip install -e .
|
||||
|
||||
|
||||
If you have any questions, please mail the code-quality@python.org
|
||||
mailing list for support. See
|
||||
http://mail.python.org/mailman/listinfo/code-quality for subscription
|
||||
information and archives.
|
||||
|
||||
Documentation
|
||||
-------------
|
||||
http://astroid.readthedocs.io/en/latest/
|
||||
|
||||
|
||||
Python Versions
|
||||
---------------
|
||||
|
||||
astroid 2.0 is currently available for Python 3 only. If you want Python 2
|
||||
support, use an older version of astroid (though note that these versions
|
||||
are no longer supported).
|
||||
|
||||
Test
|
||||
----
|
||||
|
||||
Tests are in the 'test' subdirectory. To launch the whole tests suite, you can use
|
||||
either `tox` or `pytest`::
|
||||
|
||||
tox
|
||||
pytest astroid
|
|
@ -1,170 +0,0 @@
|
|||
# Copyright (c) 2006-2013, 2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2014 Google, Inc.
|
||||
# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
|
||||
# Copyright (c) 2015-2016, 2018, 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
|
||||
# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2019 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Daniël van Noord <13665637+DanielNoord@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""Python Abstract Syntax Tree New Generation
|
||||
|
||||
The aim of this module is to provide a common base representation of
|
||||
python source code for projects such as pychecker, pyreverse,
|
||||
pylint... Well, actually the development of this library is essentially
|
||||
governed by pylint's needs.
|
||||
|
||||
It extends class defined in the python's _ast module with some
|
||||
additional methods and attributes. Instance attributes are added by a
|
||||
builder object, which can either generate extended ast (let's call
|
||||
them astroid ;) by visiting an existent ast tree or by inspecting living
|
||||
object. Methods are added by monkey patching ast classes.
|
||||
|
||||
Main modules are:
|
||||
|
||||
* nodes and scoped_nodes for more information about methods and
|
||||
attributes added to different node classes
|
||||
|
||||
* the manager contains a high level object to get astroid trees from
|
||||
source files and living objects. It maintains a cache of previously
|
||||
constructed tree for quick access
|
||||
|
||||
* builder contains the class responsible to build astroid trees
|
||||
"""
|
||||
|
||||
from importlib import import_module
|
||||
from pathlib import Path
|
||||
|
||||
# isort: off
|
||||
# We have an isort: off on '__version__' because the packaging need to access
|
||||
# the version before the dependencies are installed (in particular 'wrapt'
|
||||
# that is imported in astroid.inference)
|
||||
from astroid.__pkginfo__ import __version__, version
|
||||
from astroid.nodes import node_classes, scoped_nodes
|
||||
|
||||
# isort: on
|
||||
|
||||
from astroid import inference, raw_building
|
||||
from astroid.astroid_manager import MANAGER
|
||||
from astroid.bases import BaseInstance, BoundMethod, Instance, UnboundMethod
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import extract_node, parse
|
||||
from astroid.const import Context, Del, Load, Store
|
||||
from astroid.exceptions import *
|
||||
from astroid.inference_tip import _inference_tip_cached, inference_tip
|
||||
from astroid.objects import ExceptionInstance
|
||||
|
||||
# isort: off
|
||||
# It's impossible to import from astroid.nodes with a wildcard, because
|
||||
# there is a cyclic import that prevent creating an __all__ in astroid/nodes
|
||||
# and we need astroid/scoped_nodes and astroid/node_classes to work. So
|
||||
# importing with a wildcard would clash with astroid/nodes/scoped_nodes
|
||||
# and astroid/nodes/node_classes.
|
||||
from astroid.nodes import ( # pylint: disable=redefined-builtin (Ellipsis)
|
||||
CONST_CLS,
|
||||
AnnAssign,
|
||||
Arguments,
|
||||
Assert,
|
||||
Assign,
|
||||
AssignAttr,
|
||||
AssignName,
|
||||
AsyncFor,
|
||||
AsyncFunctionDef,
|
||||
AsyncWith,
|
||||
Attribute,
|
||||
AugAssign,
|
||||
Await,
|
||||
BinOp,
|
||||
BoolOp,
|
||||
Break,
|
||||
Call,
|
||||
ClassDef,
|
||||
Compare,
|
||||
Comprehension,
|
||||
ComprehensionScope,
|
||||
Const,
|
||||
Continue,
|
||||
Decorators,
|
||||
DelAttr,
|
||||
Delete,
|
||||
DelName,
|
||||
Dict,
|
||||
DictComp,
|
||||
DictUnpack,
|
||||
Ellipsis,
|
||||
EmptyNode,
|
||||
EvaluatedObject,
|
||||
ExceptHandler,
|
||||
Expr,
|
||||
ExtSlice,
|
||||
For,
|
||||
FormattedValue,
|
||||
FunctionDef,
|
||||
GeneratorExp,
|
||||
Global,
|
||||
If,
|
||||
IfExp,
|
||||
Import,
|
||||
ImportFrom,
|
||||
Index,
|
||||
JoinedStr,
|
||||
Keyword,
|
||||
Lambda,
|
||||
List,
|
||||
ListComp,
|
||||
Match,
|
||||
MatchAs,
|
||||
MatchCase,
|
||||
MatchClass,
|
||||
MatchMapping,
|
||||
MatchOr,
|
||||
MatchSequence,
|
||||
MatchSingleton,
|
||||
MatchStar,
|
||||
MatchValue,
|
||||
Module,
|
||||
Name,
|
||||
NamedExpr,
|
||||
NodeNG,
|
||||
Nonlocal,
|
||||
Pass,
|
||||
Raise,
|
||||
Return,
|
||||
Set,
|
||||
SetComp,
|
||||
Slice,
|
||||
Starred,
|
||||
Subscript,
|
||||
TryExcept,
|
||||
TryFinally,
|
||||
Tuple,
|
||||
UnaryOp,
|
||||
Unknown,
|
||||
While,
|
||||
With,
|
||||
Yield,
|
||||
YieldFrom,
|
||||
are_exclusive,
|
||||
builtin_lookup,
|
||||
unpack_infer,
|
||||
function_to_method,
|
||||
)
|
||||
|
||||
# isort: on
|
||||
|
||||
from astroid.util import Uninferable
|
||||
|
||||
# load brain plugins
|
||||
ASTROID_INSTALL_DIRECTORY = Path(__file__).parent
|
||||
BRAIN_MODULES_DIRECTORY = ASTROID_INSTALL_DIRECTORY / "brain"
|
||||
for module in BRAIN_MODULES_DIRECTORY.iterdir():
|
||||
if module.suffix == ".py":
|
||||
import_module(f"astroid.brain.{module.stem}")
|
|
@ -1,28 +0,0 @@
|
|||
# Copyright (c) 2006-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2014-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Google, Inc.
|
||||
# Copyright (c) 2015-2017 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
|
||||
# Copyright (c) 2015 Radosław Ganczarek <radoslaw@ganczarek.in>
|
||||
# Copyright (c) 2016 Moises Lopez <moylop260@vauxoo.com>
|
||||
# Copyright (c) 2017 Hugo <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2017 Calen Pennington <cale@edx.org>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2019 Uilian Ries <uilianries@gmail.com>
|
||||
# Copyright (c) 2019 Thomas Hisch <t.hisch@gmail.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2020 David Gilman <davidgilman1@gmail.com>
|
||||
# Copyright (c) 2020 Konrad Weihmann <kweihmann@outlook.com>
|
||||
# Copyright (c) 2020 Felix Mölder <felix.moelder@uni-due.de>
|
||||
# Copyright (c) 2020 Michael <michael-k@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
__version__ = "2.9.3"
|
||||
version = __version__
|
126
astroid/_ast.py
126
astroid/_ast.py
|
@ -1,126 +0,0 @@
|
|||
import ast
|
||||
import sys
|
||||
import types
|
||||
from collections import namedtuple
|
||||
from functools import partial
|
||||
from typing import Dict, Optional
|
||||
|
||||
from astroid.const import PY38_PLUS, Context
|
||||
|
||||
if sys.version_info >= (3, 8):
    # On Python 3.8, typed_ast was merged back into `ast`
    _ast_py3: Optional[types.ModuleType] = ast
else:
    # Before 3.8, parsing type comments requires the third-party
    # typed_ast package; fall back to None when it is unavailable.
    try:
        import typed_ast.ast3 as _ast_py3
    except ImportError:
        _ast_py3 = None

# (argtypes, returns) pair extracted from a function's type comment.
FunctionType = namedtuple("FunctionType", ["argtypes", "returns"])
|
||||
|
||||
|
||||
class ParserModule(
    namedtuple(
        "ParserModule",
        [
            "module",
            "unary_op_classes",
            "cmp_op_classes",
            "bool_op_classes",
            "bin_op_classes",
            "context_classes",
        ],
    )
):
    """Bundle of an AST parser module plus its operator/context lookup tables."""

    def parse(self, string: str, type_comments=True):
        """Parse *string* with the wrapped parser module.

        Forwards type-comment support where the wrapped module accepts it.
        """
        if self.module is not _ast_py3:
            # Plain ``ast`` (or any other module): no extra keyword arguments.
            return self.module.parse(string)
        if PY38_PLUS:
            # On 3.8+ the stdlib parser understands type comments natively.
            return self.module.parse(string, type_comments=type_comments)
        # typed_ast needs to know which minor version's grammar to apply.
        return self.module.parse(string, feature_version=sys.version_info.minor)
|
||||
|
||||
|
||||
def parse_function_type_comment(type_comment: str) -> Optional[FunctionType]:
    """Given a correct type comment, obtain a FunctionType object"""
    if _ast_py3 is None:
        # No parser with type-comment support is available.
        return None

    parsed = _ast_py3.parse(type_comment, "<type_comment>", "func_type")  # type: ignore[attr-defined]
    return FunctionType(argtypes=parsed.argtypes, returns=parsed.returns)
|
||||
|
||||
|
||||
def get_parser_module(type_comments=True) -> ParserModule:
    """Build a :class:`ParserModule` around the best available parser.

    Uses the type-comment-capable parser when requested and available,
    otherwise the stdlib ``ast`` module.
    """
    if type_comments and _ast_py3:
        chosen = _ast_py3
    else:
        chosen = ast

    return ParserModule(
        chosen,
        _unary_operators_from_module(chosen),
        _compare_operators_from_module(chosen),
        _bool_operators_from_module(chosen),
        _binary_operators_from_module(chosen),
        _contexts_from_module(chosen),
    )
|
||||
|
||||
|
||||
def _unary_operators_from_module(module):
|
||||
return {module.UAdd: "+", module.USub: "-", module.Not: "not", module.Invert: "~"}
|
||||
|
||||
|
||||
def _binary_operators_from_module(module):
|
||||
binary_operators = {
|
||||
module.Add: "+",
|
||||
module.BitAnd: "&",
|
||||
module.BitOr: "|",
|
||||
module.BitXor: "^",
|
||||
module.Div: "/",
|
||||
module.FloorDiv: "//",
|
||||
module.MatMult: "@",
|
||||
module.Mod: "%",
|
||||
module.Mult: "*",
|
||||
module.Pow: "**",
|
||||
module.Sub: "-",
|
||||
module.LShift: "<<",
|
||||
module.RShift: ">>",
|
||||
}
|
||||
return binary_operators
|
||||
|
||||
|
||||
def _bool_operators_from_module(module):
|
||||
return {module.And: "and", module.Or: "or"}
|
||||
|
||||
|
||||
def _compare_operators_from_module(module):
|
||||
return {
|
||||
module.Eq: "==",
|
||||
module.Gt: ">",
|
||||
module.GtE: ">=",
|
||||
module.In: "in",
|
||||
module.Is: "is",
|
||||
module.IsNot: "is not",
|
||||
module.Lt: "<",
|
||||
module.LtE: "<=",
|
||||
module.NotEq: "!=",
|
||||
module.NotIn: "not in",
|
||||
}
|
||||
|
||||
|
||||
def _contexts_from_module(module) -> Dict[ast.expr_context, Context]:
    """Map the module's expression-context classes onto astroid Context values."""
    mapping = {
        module.Load: Context.Load,
        module.Store: Context.Store,
        module.Del: Context.Del,
    }
    # Parameters are deliberately treated as stores.
    mapping[module.Param] = Context.Store
    return mapping
|
|
@ -1,316 +0,0 @@
|
|||
# Copyright (c) 2015-2016, 2018-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
|
||||
# Copyright (c) 2020 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Tushar Sadhwani <86737547+tushar-deepsource@users.noreply.github.com>
|
||||
# Copyright (c) 2021 David Liu <david@cs.toronto.edu>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
from typing import Optional
|
||||
|
||||
from astroid import nodes
|
||||
from astroid.bases import Instance
|
||||
from astroid.const import Context
|
||||
from astroid.context import CallContext, InferenceContext
|
||||
from astroid.exceptions import InferenceError, NoDefault
|
||||
from astroid.util import Uninferable
|
||||
|
||||
|
||||
class CallSite:
    """Class for understanding arguments passed into a call site

    It needs a call context, which contains the arguments and the
    keyword arguments that were passed into a given call site.
    In order to infer what an argument represents, call :meth:`infer_argument`
    with the corresponding function node and the argument name.

    :param callcontext:
        An instance of :class:`astroid.context.CallContext`, that holds
        the arguments for the call site.
    :param argument_context_map:
        Additional contexts per node, passed in from :attr:`astroid.context.Context.extra_context`
    :param context:
        An instance of :class:`astroid.context.Context`.
    """

    def __init__(
        self, callcontext: CallContext, argument_context_map=None, context=None
    ):
        if argument_context_map is None:
            argument_context_map = {}
        self.argument_context_map = argument_context_map
        args = callcontext.args
        keywords = callcontext.keywords
        # Keyword names seen more than once via ** unpacking; consulted by
        # infer_argument to raise instead of guessing between duplicates.
        self.duplicated_keywords = set()
        # Raw unpacked values; entries may be Uninferable when unpacking failed.
        self._unpacked_args = self._unpack_args(args, context=context)
        self._unpacked_kwargs = self._unpack_keywords(keywords, context=context)

        # Filtered views with the Uninferable entries removed; the size
        # difference versus the raw lists is what has_invalid_arguments /
        # has_invalid_keywords reports.
        self.positional_arguments = [
            arg for arg in self._unpacked_args if arg is not Uninferable
        ]
        self.keyword_arguments = {
            key: value
            for key, value in self._unpacked_kwargs.items()
            if value is not Uninferable
        }

    @classmethod
    def from_call(cls, call_node, context: Optional[Context] = None):
        """Get a CallSite object from the given Call node.

        context will be used to force a single inference path.
        """

        # Determine the callcontext from the given `context` object if any.
        context = context or InferenceContext()
        callcontext = CallContext(call_node.args, call_node.keywords)
        return cls(callcontext, context=context)

    def has_invalid_arguments(self):
        """Check if in the current CallSite were passed *invalid* arguments

        This can mean multiple things. For instance, if an unpacking
        of an invalid object was passed, then this method will return True.
        Other cases can be when the arguments can't be inferred by astroid,
        for example, by passing objects which aren't known statically.
        """
        return len(self.positional_arguments) != len(self._unpacked_args)

    def has_invalid_keywords(self):
        """Check if in the current CallSite were passed *invalid* keyword arguments

        For instance, unpacking a dictionary with integer keys is invalid
        (**{1:2}), because the keys must be strings, which will make this
        method to return True. Other cases where this might return True if
        objects which can't be inferred were passed.
        """
        return len(self.keyword_arguments) != len(self._unpacked_kwargs)

    def _unpack_keywords(self, keywords, context=None):
        """Resolve (name, value) keyword pairs, expanding ``**`` unpackings.

        Returns a dict mapping keyword name -> value node; values (or the
        ``None`` key for a failed unpacking) are Uninferable on failure.
        """
        values = {}
        context = context or InferenceContext()
        context.extra_context = self.argument_context_map
        for name, value in keywords:
            if name is None:
                # Then it's an unpacking operation (**)
                try:
                    inferred = next(value.infer(context=context))
                except InferenceError:
                    values[name] = Uninferable
                    continue
                except StopIteration:
                    continue

                if not isinstance(inferred, nodes.Dict):
                    # Not something we can work with.
                    values[name] = Uninferable
                    continue

                for dict_key, dict_value in inferred.items:
                    try:
                        dict_key = next(dict_key.infer(context=context))
                    except InferenceError:
                        values[name] = Uninferable
                        continue
                    except StopIteration:
                        continue
                    if not isinstance(dict_key, nodes.Const):
                        # Only constant keys can name a keyword argument.
                        values[name] = Uninferable
                        continue
                    if not isinstance(dict_key.value, str):
                        # Keyword names must be strings.
                        values[name] = Uninferable
                        continue
                    if dict_key.value in values:
                        # The name is already in the dictionary
                        values[dict_key.value] = Uninferable
                        self.duplicated_keywords.add(dict_key.value)
                        continue
                    values[dict_key.value] = dict_value
            else:
                values[name] = value
        return values

    def _unpack_args(self, args, context=None):
        """Flatten positional arguments, expanding ``*`` (Starred) unpackings.

        Returns a list of value nodes; entries are Uninferable when an
        unpacked object could not be resolved to something with ``elts``.
        """
        values = []
        context = context or InferenceContext()
        context.extra_context = self.argument_context_map
        for arg in args:
            if isinstance(arg, nodes.Starred):
                try:
                    inferred = next(arg.value.infer(context=context))
                except InferenceError:
                    values.append(Uninferable)
                    continue
                except StopIteration:
                    continue

                if inferred is Uninferable:
                    values.append(Uninferable)
                    continue
                if not hasattr(inferred, "elts"):
                    # Not a sequence-like node we can expand.
                    values.append(Uninferable)
                    continue
                values.extend(inferred.elts)
            else:
                values.append(arg)
        return values

    def infer_argument(self, funcnode, name, context):
        """infer a function argument value according to the call context

        Arguments:
            funcnode: The function being called.
            name: The name of the argument whose value is being inferred.
            context: Inference context object

        Returns an iterator of inferred values, or raises InferenceError
        when no value can be determined for *name*.
        """
        if name in self.duplicated_keywords:
            raise InferenceError(
                "The arguments passed to {func!r} " " have duplicate keywords.",
                call_site=self,
                func=funcnode,
                arg=name,
                context=context,
            )

        # Look into the keywords first, maybe it's already there.
        try:
            return self.keyword_arguments[name].infer(context)
        except KeyError:
            pass

        # Too many arguments given and no variable arguments.
        if len(self.positional_arguments) > len(funcnode.args.args):
            if not funcnode.args.vararg and not funcnode.args.posonlyargs:
                raise InferenceError(
                    "Too many positional arguments "
                    "passed to {func!r} that does "
                    "not have *args.",
                    call_site=self,
                    func=funcnode,
                    arg=name,
                    context=context,
                )

        # Split call-site positionals into those consumed by named
        # parameters and the overflow that would land in *args.
        positional = self.positional_arguments[: len(funcnode.args.args)]
        vararg = self.positional_arguments[len(funcnode.args.args) :]
        argindex = funcnode.args.find_argname(name)[0]
        kwonlyargs = {arg.name for arg in funcnode.args.kwonlyargs}
        kwargs = {
            key: value
            for key, value in self.keyword_arguments.items()
            if key not in kwonlyargs
        }
        # If there are too few positionals compared to
        # what the function expects to receive, check to see
        # if the missing positional arguments were passed
        # as keyword arguments and if so, place them into the
        # positional args list.
        if len(positional) < len(funcnode.args.args):
            for func_arg in funcnode.args.args:
                if func_arg.name in kwargs:
                    arg = kwargs.pop(func_arg.name)
                    positional.append(arg)

        if argindex is not None:
            boundnode = getattr(context, "boundnode", None)
            # 2. first argument of instance/class method
            if argindex == 0 and funcnode.type in {"method", "classmethod"}:
                # context.boundnode is None when an instance method is called with
                # the class, e.g. MyClass.method(obj, ...). In this case, self
                # is the first argument.
                if boundnode is None and funcnode.type == "method" and positional:
                    return positional[0].infer(context=context)
                if boundnode is None:
                    # XXX can do better ?
                    boundnode = funcnode.parent.frame(future=True)

                if isinstance(boundnode, nodes.ClassDef):
                    # Verify that we're accessing a method
                    # of the metaclass through a class, as in
                    # `cls.metaclass_method`. In this case, the
                    # first argument is always the class.
                    method_scope = funcnode.parent.scope()
                    if method_scope is boundnode.metaclass():
                        return iter((boundnode,))

                if funcnode.type == "method":
                    if not isinstance(boundnode, Instance):
                        boundnode = boundnode.instantiate_class()
                    return iter((boundnode,))
                if funcnode.type == "classmethod":
                    return iter((boundnode,))
            # if we have a method, extract one position
            # from the index, so we'll take in account
            # the extra parameter represented by `self` or `cls`
            if funcnode.type in {"method", "classmethod"} and boundnode:
                argindex -= 1
            # 2. search arg index
            try:
                return self.positional_arguments[argindex].infer(context)
            except IndexError:
                pass

        if funcnode.args.kwarg == name:
            # It wants all the keywords that were passed into
            # the call site.
            if self.has_invalid_keywords():
                raise InferenceError(
                    "Inference failed to find values for all keyword arguments "
                    "to {func!r}: {unpacked_kwargs!r} doesn't correspond to "
                    "{keyword_arguments!r}.",
                    keyword_arguments=self.keyword_arguments,
                    unpacked_kwargs=self._unpacked_kwargs,
                    call_site=self,
                    func=funcnode,
                    arg=name,
                    context=context,
                )
            # Synthesize a Dict node holding the leftover keywords.
            kwarg = nodes.Dict(
                lineno=funcnode.args.lineno,
                col_offset=funcnode.args.col_offset,
                parent=funcnode.args,
            )
            kwarg.postinit(
                [(nodes.const_factory(key), value) for key, value in kwargs.items()]
            )
            return iter((kwarg,))
        if funcnode.args.vararg == name:
            # It wants all the args that were passed into
            # the call site.
            if self.has_invalid_arguments():
                raise InferenceError(
                    "Inference failed to find values for all positional "
                    "arguments to {func!r}: {unpacked_args!r} doesn't "
                    "correspond to {positional_arguments!r}.",
                    positional_arguments=self.positional_arguments,
                    unpacked_args=self._unpacked_args,
                    call_site=self,
                    func=funcnode,
                    arg=name,
                    context=context,
                )
            # Synthesize a Tuple node holding the overflow positionals.
            args = nodes.Tuple(
                lineno=funcnode.args.lineno,
                col_offset=funcnode.args.col_offset,
                parent=funcnode.args,
            )
            args.postinit(vararg)
            return iter((args,))

        # Check if it's a default parameter.
        try:
            return funcnode.args.default_value(name).infer(context)
        except NoDefault:
            pass
        raise InferenceError(
            "No value found for argument {arg} to {func!r}",
            call_site=self,
            func=funcnode,
            arg=name,
            context=context,
        )
|
|
@ -1,15 +0,0 @@
|
|||
"""
|
||||
This file contain the global astroid MANAGER, to prevent circular import that happened
|
||||
when the only possibility to import it was from astroid.__init__.py.
|
||||
|
||||
This AstroidManager is a singleton/borg so it's possible to instantiate an
|
||||
AstroidManager() directly.
|
||||
"""
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
|
||||
from astroid.manager import AstroidManager

# Shared module-level manager. Per this module's docstring, AstroidManager
# is a singleton/borg, so this instance shares state with any other
# AstroidManager() constructed elsewhere.
MANAGER = AstroidManager()
|
600
astroid/bases.py
600
astroid/bases.py
|
@ -1,600 +0,0 @@
|
|||
# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2012 FELD Boris <lothiraldan@gmail.com>
|
||||
# Copyright (c) 2014-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Google, Inc.
|
||||
# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
|
||||
# Copyright (c) 2016-2017 Derek Gustafson <degustaf@gmail.com>
|
||||
# Copyright (c) 2017 Calen Pennington <calen.pennington@gmail.com>
|
||||
# Copyright (c) 2018-2019 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2018-2019 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2018 Daniel Colascione <dancol@dancol.org>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Tushar Sadhwani <86737547+tushar-deepsource@users.noreply.github.com>
|
||||
# Copyright (c) 2021 pre-commit-ci[bot] <bot@noreply.github.com>
|
||||
# Copyright (c) 2021 Daniël van Noord <13665637+DanielNoord@users.noreply.github.com>
|
||||
# Copyright (c) 2021 David Liu <david@cs.toronto.edu>
|
||||
# Copyright (c) 2021 doranid <ddandd@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Andrew Haigh <hello@nelf.in>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""This module contains base classes and functions for the nodes and some
|
||||
inference utils.
|
||||
"""
|
||||
|
||||
import collections
|
||||
|
||||
from astroid import decorators
|
||||
from astroid.const import PY310_PLUS
|
||||
from astroid.context import (
|
||||
CallContext,
|
||||
InferenceContext,
|
||||
bind_context_to_node,
|
||||
copy_context,
|
||||
)
|
||||
from astroid.exceptions import (
|
||||
AstroidTypeError,
|
||||
AttributeInferenceError,
|
||||
InferenceError,
|
||||
NameInferenceError,
|
||||
)
|
||||
from astroid.util import Uninferable, lazy_descriptor, lazy_import
|
||||
|
||||
# These submodules are loaded lazily -- presumably to break import cycles
# between astroid's core modules; TODO confirm.
objectmodel = lazy_import("interpreter.objectmodel")
helpers = lazy_import("helpers")
manager = lazy_import("manager")


# TODO: check if needs special treatment
BOOL_SPECIAL_METHOD = "__bool__"
BUILTINS = "builtins"  # TODO Remove in 2.8

# Fully-qualified decorator names that always denote a property.
PROPERTIES = {"builtins.property", "abc.abstractproperty"}
if PY310_PLUS:
    PROPERTIES.add("enum.property")

# List of possible property names. We use this list in order
# to see if a method is a property or not. This should be
# pretty reliable and fast, the alternative being to check each
# decorator to see if its a real property-like descriptor, which
# can be too complicated.
# Also, these aren't qualified, because each project can
# define them, we shouldn't expect to know every possible
# property-like decorator!
POSSIBLE_PROPERTIES = {
    "cached_property",
    "cachedproperty",
    "lazyproperty",
    "lazy_property",
    "reify",
    "lazyattribute",
    "lazy_attribute",
    "LazyProperty",
    "lazy",
    "cache_readonly",
    "DynamicClassAttribute",
}
|
||||
|
||||
|
||||
def _is_property(meth, context=None):
    """Return True when *meth* appears to be decorated as a property.

    Checks, in order: exact qualified names in PROPERTIES, unqualified
    names in POSSIBLE_PROPERTIES, and finally decorators that are classes
    deriving directly from builtins.property.
    """
    decoratornames = meth.decoratornames(context=context)
    if PROPERTIES.intersection(decoratornames):
        return True
    # Compare only the last dotted component against the unqualified names.
    stripped = {
        name.split(".")[-1] for name in decoratornames if name is not Uninferable
    }
    if any(name in stripped for name in POSSIBLE_PROPERTIES):
        return True

    # Lookup for subclasses of *property*
    if not meth.decorators:
        return False
    for decorator in meth.decorators.nodes or ():
        inferred = helpers.safe_infer(decorator, context=context)
        if inferred is None or inferred is Uninferable:
            continue
        if inferred.__class__.__name__ == "ClassDef":
            for base_class in inferred.bases:
                if base_class.__class__.__name__ != "Name":
                    continue
                module, _ = base_class.lookup(base_class.name)
                # Only a direct base spelled `property` resolving to builtins
                # counts; deeper inheritance chains are not followed here.
                if module.name == "builtins" and base_class.name == "property":
                    return True

    return False
|
||||
|
||||
|
||||
class Proxy:
    """a simple proxy object

    Note:

    Subclasses of this object will need a custom __getattr__
    if new instance attributes are created. See the Const class
    """

    # The proxied object; may be provided by the class or per instance.
    _proxied = None

    def __init__(self, proxied=None):
        # A None argument keeps whatever the class-level default provides.
        if proxied is not None:
            self._proxied = proxied

    def __getattr__(self, name):
        # Resolve "_proxied" from the class so this hook never recurses
        # while looking up the proxied object itself.
        if name == "_proxied":
            return self.__class__._proxied
        instance_dict = self.__dict__
        if name in instance_dict:
            return instance_dict[name]
        # Everything else is delegated to the wrapped object.
        return getattr(self._proxied, name)

    def infer(self, context=None):
        # A proxy infers to itself.
        yield self
|
||||
|
||||
|
||||
def _infer_stmts(stmts, context, frame=None):
    """Return an iterator on statements inferred by each statement in *stmts*.

    Yields Uninferable for statements whose inference fails outright;
    raises InferenceError only when *no* statement produced any result.
    """
    inferred = False
    if context is not None:
        name = context.lookupname
        # Clone so mutations of lookupname below don't leak to the caller.
        context = context.clone()
    else:
        name = None
        context = InferenceContext()

    for stmt in stmts:
        if stmt is Uninferable:
            # Pass Uninferable through; it still counts as a result.
            yield stmt
            inferred = True
            continue
        context.lookupname = stmt._infer_name(frame, name)
        try:
            for inf in stmt.infer(context=context):
                yield inf
                inferred = True
        except NameInferenceError:
            # Name couldn't be resolved for this statement; try the next one.
            continue
        except InferenceError:
            yield Uninferable
            inferred = True
    if not inferred:
        raise InferenceError(
            "Inference failed for all members of {stmts!r}.",
            stmts=stmts,
            frame=frame,
            context=context,
        )
|
||||
|
||||
|
||||
def _infer_method_result_truth(instance, method_name, context):
    """Infer the truth value returned by calling *method_name* on *instance*.

    Returns the bool_value() of the first inferred result, or Uninferable
    when the method is missing, not callable, or inference fails.
    """
    # Get the method from the instance and try to infer
    # its return's truth value.
    meth = next(instance.igetattr(method_name, context=context), None)
    if meth and hasattr(meth, "infer_call_result"):
        if not meth.callable():
            return Uninferable
        try:
            context.callcontext = CallContext(args=[], callee=meth)
            for value in meth.infer_call_result(instance, context=context):
                if value is Uninferable:
                    return value
                try:
                    inferred = next(value.infer(context=context))
                except StopIteration as e:
                    raise InferenceError(context=context) from e
                # Only the first inferred result is considered.
                return inferred.bool_value()
        except InferenceError:
            pass
    return Uninferable
|
||||
|
||||
|
||||
class BaseInstance(Proxy):
    """An instance base class, which provides lookup methods for potential instances."""

    # Set by subclasses to a special-attribute model (e.g. InstanceModel).
    special_attributes = None

    def display_type(self):
        return "Instance of"

    def getattr(self, name, context=None, lookupclass=True):
        """Look up attribute *name*, returning a list of matching nodes.

        Instance attributes are tried first, then special attributes, then
        (when *lookupclass* is true) the class's own attributes.
        """
        try:
            values = self._proxied.instance_attr(name, context)
        except AttributeInferenceError as exc:
            if self.special_attributes and name in self.special_attributes:
                return [self.special_attributes.lookup(name)]

            if lookupclass:
                # Class attributes not available through the instance
                # unless they are explicitly defined.
                return self._proxied.getattr(name, context, class_context=False)

            raise AttributeInferenceError(
                target=self, attribute=name, context=context
            ) from exc
        # since we've no context information, return matching class members as
        # well
        if lookupclass:
            try:
                return values + self._proxied.getattr(
                    name, context, class_context=False
                )
            except AttributeInferenceError:
                pass
        return values

    def igetattr(self, name, context=None):
        """inferred getattr"""
        if not context:
            context = InferenceContext()
        try:
            context.lookupname = name
            # avoid recursively inferring the same attr on the same class
            if context.push(self._proxied):
                raise InferenceError(
                    message="Cannot infer the same attribute again",
                    node=self,
                    context=context,
                )

            # XXX frame should be self._proxied, or not ?
            get_attr = self.getattr(name, context, lookupclass=False)
            yield from _infer_stmts(
                self._wrap_attr(get_attr, context), context, frame=self
            )
        except AttributeInferenceError:
            try:
                # fallback to class.igetattr since it has some logic to handle
                # descriptors
                # But only if the _proxied is the Class.
                if self._proxied.__class__.__name__ != "ClassDef":
                    raise
                attrs = self._proxied.igetattr(name, context, class_context=False)
                yield from self._wrap_attr(attrs, context)
            except AttributeInferenceError as error:
                raise InferenceError(**vars(error)) from error

    def _wrap_attr(self, attrs, context=None):
        """wrap bound methods of attrs in a InstanceMethod proxies"""
        for attr in attrs:
            if isinstance(attr, UnboundMethod):
                if _is_property(attr):
                    # Properties yield their call result, not the method.
                    yield from attr.infer_call_result(self, context)
                else:
                    yield BoundMethod(attr, self)
            elif hasattr(attr, "name") and attr.name == "<lambda>":
                # Lambdas whose first argument is named "self" behave like
                # bound methods; others are yielded unchanged.
                if attr.args.arguments and attr.args.arguments[0].name == "self":
                    yield BoundMethod(attr, self)
                    continue
                yield attr
            else:
                yield attr

    def infer_call_result(self, caller, context=None):
        """infer what a class instance is returning when called"""
        context = bind_context_to_node(context, self)
        inferred = False
        # Calling an instance dispatches to its class's __call__.
        for node in self._proxied.igetattr("__call__", context):
            if node is Uninferable or not node.callable():
                continue
            for res in node.infer_call_result(caller, context):
                inferred = True
                yield res
        if not inferred:
            raise InferenceError(node=self, caller=caller, context=context)
|
||||
|
||||
|
||||
class Instance(BaseInstance):
    """A special node representing a class instance."""

    # pylint: disable=unnecessary-lambda
    special_attributes = lazy_descriptor(lambda: objectmodel.InstanceModel())

    def __repr__(self):
        return "<Instance of {}.{} at 0x{}>".format(
            self._proxied.root().name, self._proxied.name, id(self)
        )

    def __str__(self):
        return f"Instance of {self._proxied.root().name}.{self._proxied.name}"

    def callable(self):
        """Return True when the instance's class defines __call__."""
        try:
            self._proxied.getattr("__call__", class_context=False)
            return True
        except AttributeInferenceError:
            return False

    def pytype(self):
        # The type of an instance is its class's qualified name.
        return self._proxied.qname()

    def display_type(self):
        return "Instance of"

    def bool_value(self, context=None):
        """Infer the truth value for an Instance

        The truth value of an instance is determined by these conditions:

        * if it implements __bool__ on Python 3 or __nonzero__
          on Python 2, then its bool value will be determined by
          calling this special method and checking its result.
        * when this method is not defined, __len__() is called, if it
          is defined, and the object is considered true if its result is
          nonzero. If a class defines neither __len__() nor __bool__(),
          all its instances are considered true.
        """
        context = context or InferenceContext()
        context.boundnode = self

        try:
            result = _infer_method_result_truth(self, BOOL_SPECIAL_METHOD, context)
        except (InferenceError, AttributeInferenceError):
            # Fallback to __len__.
            try:
                result = _infer_method_result_truth(self, "__len__", context)
            except (AttributeInferenceError, InferenceError):
                # Neither protocol method usable: instances default to truthy.
                return True
        return result

    def getitem(self, index, context=None):
        """Infer the result of ``self[index]`` via the class's __getitem__."""
        # TODO: Rewrap index to Const for this case
        new_context = bind_context_to_node(context, self)
        if not context:
            context = new_context
        method = next(self.igetattr("__getitem__", context=context), None)
        # Create a new CallContext for providing index as an argument.
        new_context.callcontext = CallContext(args=[index], callee=method)
        if not isinstance(method, BoundMethod):
            raise InferenceError(
                "Could not find __getitem__ for {node!r}.", node=self, context=context
            )
        if len(method.args.arguments) != 2:  # (self, index)
            raise AstroidTypeError(
                "__getitem__ for {node!r} does not have correct signature",
                node=self,
                context=context,
            )
        return next(method.infer_call_result(self, new_context), None)
|
||||
|
||||
|
||||
class UnboundMethod(Proxy):
    """a special node representing a method not bound to an instance"""

    # pylint: disable=unnecessary-lambda
    special_attributes = lazy_descriptor(lambda: objectmodel.UnboundMethodModel())

    def __repr__(self):
        # Bug fix: the format string previously ended with "0x{}" and never
        # closed the angle bracket, yielding an unbalanced repr; close it
        # for consistency with Instance.__repr__.
        frame = self._proxied.parent.frame(future=True)
        return "<{} {} of {} at 0x{}>".format(
            self.__class__.__name__, self._proxied.name, frame.qname(), id(self)
        )

    def implicit_parameters(self):
        # An unbound method receives no implicit first argument.
        return 0

    def is_bound(self):
        return False

    def getattr(self, name, context=None):
        """Look up *name*, preferring the method's special attributes."""
        if name in self.special_attributes:
            return [self.special_attributes.lookup(name)]
        return self._proxied.getattr(name, context)

    def igetattr(self, name, context=None):
        """Inferred getattr: like :meth:`getattr` but returns an iterator."""
        if name in self.special_attributes:
            return iter((self.special_attributes.lookup(name),))
        return self._proxied.igetattr(name, context)

    def infer_call_result(self, caller, context):
        """
        The boundnode of the regular context with a function called
        on ``object.__new__`` will be of type ``object``,
        which is incorrect for the argument in general.
        If no context is given the ``object.__new__`` call argument will
        correctly inferred except when inside a call that requires
        the additional context (such as a classmethod) of the boundnode
        to determine which class the method was called from
        """

        # If we're unbound method __new__ of builtin object, the result is an
        # instance of the class given as first argument.
        if (
            self._proxied.name == "__new__"
            and self._proxied.parent.frame(future=True).qname() == "builtins.object"
        ):
            if caller.args:
                node_context = context.extra_context.get(caller.args[0])
                infer = caller.args[0].infer(context=node_context)
            else:
                infer = []
            return (Instance(x) if x is not Uninferable else x for x in infer)
        return self._proxied.infer_call_result(caller, context)

    def bool_value(self, context=None):
        # A method object is always truthy.
        return True
|
||||
|
||||
|
||||
class BoundMethod(UnboundMethod):
    """a special node representing a method bound to an instance"""

    # pylint: disable=unnecessary-lambda
    special_attributes = lazy_descriptor(lambda: objectmodel.BoundMethodModel())

    def __init__(self, proxy, bound):
        # ``bound`` is the object (instance or class) this method is bound to.
        super().__init__(proxy)
        self.bound = bound

    def implicit_parameters(self):
        """Return the number of implicitly supplied leading arguments."""
        if self.name == "__new__":
            # __new__ acts as a classmethod but the class argument is not implicit.
            return 0
        return 1

    def is_bound(self):
        """Bound methods are, by definition, bound."""
        return True

    def _infer_type_new_call(self, caller, context):
        """Try to infer what type.__new__(mcs, name, bases, attrs) returns.

        In order for such call to be valid, the metaclass needs to be
        a subtype of ``type``, the name needs to be a string, the bases
        needs to be a tuple of classes
        """
        # pylint: disable=import-outside-toplevel; circular import
        from astroid.nodes import Pass

        # Verify the metaclass
        try:
            mcs = next(caller.args[0].infer(context=context))
        except StopIteration as e:
            raise InferenceError(context=context) from e
        if mcs.__class__.__name__ != "ClassDef":
            # Not a valid first argument.
            return None
        if not mcs.is_subtype_of("builtins.type"):
            # Not a valid metaclass.
            return None

        # Verify the name
        try:
            name = next(caller.args[1].infer(context=context))
        except StopIteration as e:
            raise InferenceError(context=context) from e
        if name.__class__.__name__ != "Const":
            # Not a valid name, needs to be a const.
            return None
        if not isinstance(name.value, str):
            # Needs to be a string.
            return None

        # Verify the bases
        try:
            bases = next(caller.args[2].infer(context=context))
        except StopIteration as e:
            raise InferenceError(context=context) from e
        if bases.__class__.__name__ != "Tuple":
            # Needs to be a tuple.
            return None
        try:
            inferred_bases = [next(elt.infer(context=context)) for elt in bases.elts]
        except StopIteration as e:
            raise InferenceError(context=context) from e
        if any(base.__class__.__name__ != "ClassDef" for base in inferred_bases):
            # All the bases needs to be Classes
            return None

        # Verify the attributes.
        try:
            attrs = next(caller.args[3].infer(context=context))
        except StopIteration as e:
            raise InferenceError(context=context) from e
        if attrs.__class__.__name__ != "Dict":
            # Needs to be a dictionary.
            return None
        cls_locals = collections.defaultdict(list)
        for key, value in attrs.items:
            try:
                key = next(key.infer(context=context))
            except StopIteration as e:
                raise InferenceError(context=context) from e
            try:
                value = next(value.infer(context=context))
            except StopIteration as e:
                raise InferenceError(context=context) from e
            # Ignore non string keys
            if key.__class__.__name__ == "Const" and isinstance(key.value, str):
                cls_locals[key.value].append(value)

        # Build the class from now.
        cls = mcs.__class__(
            name=name.value,
            lineno=caller.lineno,
            col_offset=caller.col_offset,
            parent=caller,
        )
        # A class body cannot be empty; a single Pass stands in for it.
        empty = Pass()
        cls.postinit(
            bases=bases.elts,
            body=[empty],
            decorators=[],
            newstyle=True,
            metaclass=mcs,
            keywords=[],
        )
        cls.locals = cls_locals
        return cls

    def infer_call_result(self, caller, context=None):
        """Infer the result of calling this bound method."""
        context = bind_context_to_node(context, self.bound)
        if (
            self.bound.__class__.__name__ == "ClassDef"
            and self.bound.name == "type"
            and self.name == "__new__"
            and len(caller.args) == 4
        ):
            # Check if we have a ``type.__new__(mcs, name, bases, attrs)`` call.
            new_cls = self._infer_type_new_call(caller, context)
            if new_cls:
                return iter((new_cls,))

        return super().infer_call_result(caller, context)

    def bool_value(self, context=None):
        """A bound method object is always truthy."""
        return True
|
||||
|
||||
|
||||
class Generator(BaseInstance):
    """a special node representing a generator.

    Proxied class is set once for all in raw_building.
    """

    special_attributes = lazy_descriptor(objectmodel.GeneratorModel)

    def __init__(self, parent=None, generator_initial_context=None):
        """Remember the owning function and snapshot the inference context."""
        super().__init__()
        self.parent = parent
        self._call_context = copy_context(generator_initial_context)

    @decorators.cached
    def infer_yield_types(self):
        """Yield the types inferred for the owning function's yields."""
        for yielded in self.parent.infer_yield_result(self._call_context):
            yield yielded

    def callable(self):
        """A generator object is not itself callable."""
        return False

    def pytype(self):
        return "builtins.generator"

    def display_type(self):
        return "Generator"

    def bool_value(self, context=None):
        """A generator object is always truthy."""
        return True

    def __repr__(self):
        return f"<Generator({self._proxied.name}) l.{self.lineno} at 0x{id(self)}>"

    def __str__(self):
        return f"Generator({self._proxied.name})"
|
||||
|
||||
|
||||
class AsyncGenerator(Generator):
    """Special node representing an async generator"""

    def pytype(self):
        """Async generators have their own distinct builtin type name."""
        return "builtins.async_generator"

    def display_type(self):
        return "AsyncGenerator"

    def __repr__(self):
        return f"<AsyncGenerator({self._proxied.name}) l.{self.lineno} at 0x{id(self)}>"

    def __str__(self):
        return f"AsyncGenerator({self._proxied.name})"
|
|
@ -1,35 +0,0 @@
|
|||
from astroid import arguments, inference_tip, nodes
|
||||
from astroid.exceptions import UseInferenceDefault
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def infer_namespace(node, context=None):
    """Infer ``argparse.Namespace(**kwargs)`` as an instance carrying those attributes."""
    call_site = arguments.CallSite.from_call(node, context=context)
    if not call_site.keyword_arguments:
        # Without keyword arguments there is nothing to build from.
        raise UseInferenceDefault()

    namespace_cls = nodes.ClassDef("Namespace", "docstring")
    namespace_cls.parent = node.parent
    for attribute in set(call_site.keyword_arguments):
        # One placeholder node per keyword so attribute access resolves.
        stub = nodes.EmptyNode()
        stub.parent = namespace_cls
        stub.attrname = attribute
        namespace_cls.instance_attrs[attribute] = [stub]
    return iter((namespace_cls.instantiate_class(),))
|
||||
|
||||
|
||||
def _looks_like_namespace(node):
    """Return True for calls of the textual shape ``argparse.Namespace(...)``."""
    func = node.func
    if not isinstance(func, nodes.Attribute):
        return False
    return (
        func.attrname == "Namespace"
        and isinstance(func.expr, nodes.Name)
        and func.expr.name == "argparse"
    )
|
||||
|
||||
|
||||
# Register the inference tip on every Call node that textually looks like
# ``argparse.Namespace(...)``.
AstroidManager().register_transform(
    nodes.Call, inference_tip(infer_namespace), _looks_like_namespace
)
|
|
@ -1,77 +0,0 @@
|
|||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
"""
|
||||
Astroid hook for the attrs library
|
||||
|
||||
Without this hook pylint reports unsupported-assignment-operation
|
||||
for attrs classes
|
||||
"""
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.nodes.node_classes import AnnAssign, Assign, AssignName, Call, Unknown
|
||||
from astroid.nodes.scoped_nodes import ClassDef
|
||||
|
||||
# Callables whose result marks a class attribute as an attrs field.
ATTRIB_NAMES = frozenset(("attr.ib", "attrib", "attr.attrib", "attr.field", "field"))
# Decorator spellings that turn a class into an attrs class.
ATTRS_NAMES = frozenset(
    (
        "attr.s",
        "attrs",
        "attr.attrs",
        "attr.attributes",
        "attr.define",
        "attr.mutable",
        "attr.frozen",
    )
)
|
||||
|
||||
|
||||
def is_decorated_with_attrs(node, decorator_names=ATTRS_NAMES):
    """Return True if a decorated node has
    an attr decorator applied."""
    if not node.decorators:
        return False
    for decorator in node.decorators.nodes:
        # A decorator used with arguments appears as a Call; inspect its func.
        target = decorator.func if isinstance(decorator, Call) else decorator
        if target.as_string() in decorator_names:
            return True
    return False
|
||||
|
||||
|
||||
def attr_attributes_transform(node: ClassDef) -> None:
    """Given that the ClassNode has an attr decorator,
    rewrite class attributes as instance attributes
    """
    # Astroid can't infer this attribute properly
    # Prevents https://github.com/PyCQA/pylint/issues/1884
    node.locals["__attrs_attrs__"] = [Unknown(parent=node)]

    for body_node in node.body:
        # Only plain or annotated assignments whose value is an
        # ``attr.ib()``-style call are attrs fields.
        if not isinstance(body_node, (Assign, AnnAssign)):
            continue
        if not isinstance(body_node.value, Call):
            continue
        if body_node.value.func.as_string() not in ATTRIB_NAMES:
            continue
        if hasattr(body_node, "targets"):
            assign_targets = body_node.targets
        else:
            assign_targets = [body_node.target]
        for target in assign_targets:
            stub = Unknown(
                lineno=body_node.lineno,
                col_offset=body_node.col_offset,
                parent=body_node,
            )
            if isinstance(target, AssignName):
                # Could be a subscript if the code analysed is
                # i = Optional[str] = ""
                # See https://github.com/PyCQA/pylint/issues/4439
                node.locals[target.name] = [stub]
                node.instance_attrs[target.name] = [stub]
|
||||
|
||||
|
||||
# Rewrite attrs-decorated classes so pylint sees their instance attributes.
AstroidManager().register_transform(
    ClassDef, attr_attributes_transform, is_decorated_with_attrs
)
|
|
@ -1,29 +0,0 @@
|
|||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""Astroid hooks for understanding boto3.ServiceRequest()"""
|
||||
from astroid import extract_node
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.nodes.scoped_nodes import ClassDef
|
||||
|
||||
BOTO_SERVICE_FACTORY_QUALIFIED_NAME = "boto3.resources.base.ServiceResource"
|
||||
|
||||
|
||||
def service_request_transform(node):
    """Transform ServiceResource to look like dynamic classes"""
    code = """
    def __getattr__(self, attr):
        return 0
    """
    # Attach a synthetic ``__getattr__`` so any attribute access infers.
    getattr_node = extract_node(code)
    node.locals["__getattr__"] = [getattr_node]
    return node
|
||||
|
||||
|
||||
def _looks_like_boto3_service_request(node):
    """Predicate: is this ClassDef the boto3 ServiceResource base class?"""
    return BOTO_SERVICE_FACTORY_QUALIFIED_NAME == node.qname()
|
||||
|
||||
|
||||
# Make every attribute access on boto3's ServiceResource infer cleanly.
AstroidManager().register_transform(
    ClassDef, service_request_transform, _looks_like_boto3_service_request
)
|
|
@ -1,930 +0,0 @@
|
|||
# Copyright (c) 2014-2021 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2015 Rene Zhang <rz99@cornell.edu>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2019-2020 Bryce Guinta <bryce.guinta@protonmail.com>
|
||||
# Copyright (c) 2019 Stanislav Levin <slev@altlinux.org>
|
||||
# Copyright (c) 2019 David Liu <david@cs.toronto.edu>
|
||||
# Copyright (c) 2019 Frédéric Chapoton <fchapoton2@gmail.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2020 David Gilman <davidgilman1@gmail.com>
|
||||
# Copyright (c) 2020 Ram Rachum <ram@rachum.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Daniël van Noord <13665637+DanielNoord@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""Astroid hooks for various builtins."""
|
||||
|
||||
from functools import partial
|
||||
|
||||
from astroid import arguments, helpers, inference_tip, nodes, objects, util
|
||||
from astroid.builder import AstroidBuilder
|
||||
from astroid.exceptions import (
|
||||
AstroidTypeError,
|
||||
AttributeInferenceError,
|
||||
InferenceError,
|
||||
MroError,
|
||||
UseInferenceDefault,
|
||||
)
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.nodes import scoped_nodes
|
||||
|
||||
# Qualified name of the ``object.__new__`` decorator form handled below.
OBJECT_DUNDER_NEW = "object.__new__"

# Template class for synthetic ``str`` methods; ``{rvalue}`` is substituted
# with the literal each method returns ('' for str).
STR_CLASS = """
class whatever(object):
    def join(self, iterable):
        return {rvalue}
    def replace(self, old, new, count=None):
        return {rvalue}
    def format(self, *args, **kwargs):
        return {rvalue}
    def encode(self, encoding='ascii', errors=None):
        return b''
    def decode(self, encoding='ascii', errors=None):
        return u''
    def capitalize(self):
        return {rvalue}
    def title(self):
        return {rvalue}
    def lower(self):
        return {rvalue}
    def upper(self):
        return {rvalue}
    def swapcase(self):
        return {rvalue}
    def index(self, sub, start=None, end=None):
        return 0
    def find(self, sub, start=None, end=None):
        return 0
    def count(self, sub, start=None, end=None):
        return 0
    def strip(self, chars=None):
        return {rvalue}
    def lstrip(self, chars=None):
        return {rvalue}
    def rstrip(self, chars=None):
        return {rvalue}
    def rjust(self, width, fillchar=None):
        return {rvalue}
    def center(self, width, fillchar=None):
        return {rvalue}
    def ljust(self, width, fillchar=None):
        return {rvalue}
"""


# Same template for ``bytes``; omits the methods bytes does not provide
# (``format``, ``encode``).
BYTES_CLASS = """
class whatever(object):
    def join(self, iterable):
        return {rvalue}
    def replace(self, old, new, count=None):
        return {rvalue}
    def decode(self, encoding='ascii', errors=None):
        return u''
    def capitalize(self):
        return {rvalue}
    def title(self):
        return {rvalue}
    def lower(self):
        return {rvalue}
    def upper(self):
        return {rvalue}
    def swapcase(self):
        return {rvalue}
    def index(self, sub, start=None, end=None):
        return 0
    def find(self, sub, start=None, end=None):
        return 0
    def count(self, sub, start=None, end=None):
        return 0
    def strip(self, chars=None):
        return {rvalue}
    def lstrip(self, chars=None):
        return {rvalue}
    def rstrip(self, chars=None):
        return {rvalue}
    def rjust(self, width, fillchar=None):
        return {rvalue}
    def center(self, width, fillchar=None):
        return {rvalue}
    def ljust(self, width, fillchar=None):
        return {rvalue}
"""
|
||||
|
||||
|
||||
def _extend_string_class(class_node, code, rvalue):
    """function to extend builtin str/unicode class

    Builds the template *code* (with ``{rvalue}`` filled in), then grafts
    every method of the resulting fake class onto *class_node*.
    """
    code = code.format(rvalue=rvalue)
    fake = AstroidBuilder(AstroidManager()).string_build(code)["whatever"]
    for method in fake.mymethods():
        # Re-parent the synthetic method onto the real builtin class and
        # clear its fake source position.
        # (Fix: the original assigned ``method.parent`` twice per iteration,
        # once at the top and again at the bottom of the loop body.)
        method.parent = class_node
        method.lineno = None
        method.col_offset = None
        if "__class__" in method.locals:
            method.locals["__class__"] = [class_node]
        class_node.locals[method.name] = [method]
|
||||
|
||||
|
||||
def _extend_builtins(class_transforms):
    """Apply each transform in *class_transforms* to its builtin class."""
    builtins_module = AstroidManager().builtins_module
    for name, apply_transform in class_transforms.items():
        apply_transform(builtins_module[name])
|
||||
|
||||
|
||||
# Install the synthetic str/bytes methods on the builtins module at import time.
_extend_builtins(
    {
        "bytes": partial(_extend_string_class, code=BYTES_CLASS, rvalue="b''"),
        "str": partial(_extend_string_class, code=STR_CLASS, rvalue="''"),
    }
)
|
||||
|
||||
|
||||
def _builtin_filter_predicate(node, builtin_name):
    """Decide whether *node* is a call to *builtin_name* we should transform."""
    if (
        builtin_name == "type"
        and node.root().name == "re"
        and isinstance(node.func, nodes.Name)
        and node.func.name == "type"
        and isinstance(node.parent, nodes.Assign)
        and len(node.parent.targets) == 1
        and isinstance(node.parent.targets[0], nodes.AssignName)
        and node.parent.targets[0].name in {"Pattern", "Match"}
    ):
        # Handle re.Pattern and re.Match in brain_re
        # Match these patterns from stdlib/re.py
        # ```py
        # Pattern = type(...)
        # Match = type(...)
        # ```
        return False
    func = node.func
    if isinstance(func, nodes.Name):
        return func.name == builtin_name
    if isinstance(func, nodes.Attribute):
        # dict.fromkeys(...) is the only attribute form we handle.
        return (
            func.attrname == "fromkeys"
            and isinstance(func.expr, nodes.Name)
            and func.expr.name == "dict"
        )
    return False
|
||||
|
||||
|
||||
def register_builtin_transform(transform, builtin_name):
    """Register a new transform function for the given *builtin_name*.

    The transform function must accept two parameters, a node and
    an optional context.
    """

    def _transform_wrapper(node, context=None):
        # Inference tips must return an iterator of nodes, so wrap the
        # single result and backfill parent/position from the call node.
        result = transform(node, context=context)
        if result:
            if not result.parent:
                # Let the transformation function determine
                # the parent for its result. Otherwise,
                # we set it to be the node we transformed from.
                result.parent = node

            if result.lineno is None:
                result.lineno = node.lineno
            # Can be a 'Module' see https://github.com/PyCQA/pylint/issues/4671
            # We don't have a regression test on this one: tread carefully
            if hasattr(result, "col_offset") and result.col_offset is None:
                result.col_offset = node.col_offset
        return iter([result])

    AstroidManager().register_transform(
        nodes.Call,
        inference_tip(_transform_wrapper),
        partial(_builtin_filter_predicate, builtin_name=builtin_name),
    )
|
||||
|
||||
|
||||
def _container_generic_inference(node, context, node_type, transform):
    """Infer a container constructor call by transforming its single argument."""
    args = node.args
    if not args:
        # Zero-argument constructor: an empty container.
        return node_type()
    if len(node.args) > 1:
        raise UseInferenceDefault()

    (argument,) = args
    result = transform(argument)
    if not result:
        # The raw AST node wasn't usable; try its inferred value instead.
        try:
            inferred = next(argument.infer(context=context))
        except (InferenceError, StopIteration) as exc:
            raise UseInferenceDefault from exc
        if inferred is util.Uninferable:
            raise UseInferenceDefault
        result = transform(inferred)
    if not result or result is util.Uninferable:
        raise UseInferenceDefault
    return result
|
||||
|
||||
|
||||
def _container_generic_transform(  # pylint: disable=inconsistent-return-statements
    arg, context, klass, iterables, build_elts
):
    """Convert *arg* into a *klass* container node, mirroring builtin semantics.

    Falls through (returns None) when *arg* is not something we know how to
    convert; the caller treats that as "not transformed".
    """
    if isinstance(arg, klass):
        return arg
    if isinstance(arg, iterables):
        if all(isinstance(elt, nodes.Const) for elt in arg.elts):
            elts = [elt.value for elt in arg.elts]
        else:
            # TODO: Does not handle deduplication for sets.
            elts = []
            for element in arg.elts:
                if not element:
                    continue
                inferred = helpers.safe_infer(element, context=context)
                if inferred:
                    evaluated_object = nodes.EvaluatedObject(
                        original=element, value=inferred
                    )
                    elts.append(evaluated_object)
    elif isinstance(arg, nodes.Dict):
        # Dicts need to have consts as strings already.
        if not all(isinstance(elt[0], nodes.Const) for elt in arg.items):
            raise UseInferenceDefault()
        elts = [item[0].value for item in arg.items]
    elif isinstance(arg, nodes.Const) and isinstance(arg.value, (str, bytes)):
        # Strings/bytes iterate character by character.
        elts = arg.value
    else:
        return
    return klass.from_elements(elts=build_elts(elts))
|
||||
|
||||
|
||||
def _infer_builtin_container(
    node, context, klass=None, iterables=None, build_elts=None
):
    """Shared driver for inferring tuple/list/set/frozenset constructor calls."""
    transform = partial(
        _container_generic_transform,
        context=context,
        klass=klass,
        iterables=iterables,
        build_elts=build_elts,
    )
    return _container_generic_inference(node, context, klass, transform)
|
||||
|
||||
|
||||
# pylint: disable=invalid-name
# Concrete inference functions for each builtin container constructor;
# ``iterables`` lists the node types each one accepts as its argument.
infer_tuple = partial(
    _infer_builtin_container,
    klass=nodes.Tuple,
    iterables=(
        nodes.List,
        nodes.Set,
        objects.FrozenSet,
        objects.DictItems,
        objects.DictKeys,
        objects.DictValues,
    ),
    build_elts=tuple,
)

infer_list = partial(
    _infer_builtin_container,
    klass=nodes.List,
    iterables=(
        nodes.Tuple,
        nodes.Set,
        objects.FrozenSet,
        objects.DictItems,
        objects.DictKeys,
        objects.DictValues,
    ),
    build_elts=list,
)

infer_set = partial(
    _infer_builtin_container,
    klass=nodes.Set,
    iterables=(nodes.List, nodes.Tuple, objects.FrozenSet, objects.DictKeys),
    build_elts=set,
)

infer_frozenset = partial(
    _infer_builtin_container,
    klass=objects.FrozenSet,
    iterables=(nodes.List, nodes.Tuple, nodes.Set, objects.FrozenSet, objects.DictKeys),
    build_elts=frozenset,
)
|
||||
|
||||
|
||||
def _get_elts(arg, context):
    """Infer *arg* and return its (key, value) pairs for dict construction."""

    def _is_pair_container(n):
        return isinstance(n, (nodes.List, nodes.Tuple, nodes.Set))

    try:
        inferred = next(arg.infer(context))
    except (InferenceError, StopIteration) as exc:
        raise UseInferenceDefault from exc

    if isinstance(inferred, nodes.Dict):
        return inferred.items
    if not _is_pair_container(inferred):
        raise UseInferenceDefault()

    items = []
    for elt in inferred.elts:
        # If an item is not a pair of two items,
        # then fallback to the default inference.
        # Also, take in consideration only hashable items,
        # tuples and consts. We are choosing Names as well.
        if not _is_pair_container(elt):
            raise UseInferenceDefault()
        if len(elt.elts) != 2:
            raise UseInferenceDefault()
        if not isinstance(elt.elts[0], (nodes.Tuple, nodes.Const, nodes.Name)):
            raise UseInferenceDefault()
        items.append(tuple(elt.elts))
    return items
|
||||
|
||||
|
||||
def infer_dict(node, context=None):
    """Try to infer a dict call to a Dict node.

    The function treats the following cases:

        * dict()
        * dict(mapping)
        * dict(iterable)
        * dict(iterable, **kwargs)
        * dict(mapping, **kwargs)
        * dict(**kwargs)

    If a case can't be inferred, we'll fallback to default inference.
    """
    call = arguments.CallSite.from_call(node, context=context)
    if call.has_invalid_arguments() or call.has_invalid_keywords():
        raise UseInferenceDefault

    args = call.positional_arguments
    kwargs = list(call.keyword_arguments.items())

    if not args and not kwargs:
        # dict()
        return nodes.Dict()
    if kwargs and not args:
        # dict(a=1, b=2, c=4)
        items = [(nodes.Const(key), value) for key, value in kwargs]
    elif len(args) == 1 and kwargs:
        # dict(some_iterable, b=2, c=4)
        items = _get_elts(args[0], context) + [
            (nodes.Const(key), value) for key, value in kwargs
        ]
    elif len(args) == 1:
        items = _get_elts(args[0], context)
    else:
        raise UseInferenceDefault()

    result = nodes.Dict(
        col_offset=node.col_offset, lineno=node.lineno, parent=node.parent
    )
    result.postinit(items)
    return result
|
||||
|
||||
|
||||
def infer_super(node, context=None):
    """Understand super calls.

    There are some restrictions for what can be understood:

    * unbounded super (one argument form) is not understood.

    * if the super call is not inside a function (classmethod or method),
      then the default inference will be used.

    * if the super arguments can't be inferred, the default inference
      will be used.
    """
    if len(node.args) == 1:
        # Ignore unbounded super.
        raise UseInferenceDefault

    scope = node.scope()
    if not isinstance(scope, nodes.FunctionDef):
        # Ignore non-method uses of super.
        raise UseInferenceDefault
    if scope.type not in ("classmethod", "method"):
        # Not interested in staticmethods.
        raise UseInferenceDefault

    cls = scoped_nodes.get_wrapping_class(scope)
    if not node.args:
        # Zero-argument super(): both the MRO pointer and type are implicit.
        mro_pointer = cls
        # If we are in a classmethod, the interpreter will fill
        # automatically the class as the second argument, not an instance.
        if scope.type == "classmethod":
            mro_type = cls
        else:
            mro_type = cls.instantiate_class()
    else:
        # Two-argument super(type, obj_or_type): infer both explicitly.
        try:
            mro_pointer = next(node.args[0].infer(context=context))
        except (InferenceError, StopIteration) as exc:
            raise UseInferenceDefault from exc
        try:
            mro_type = next(node.args[1].infer(context=context))
        except (InferenceError, StopIteration) as exc:
            raise UseInferenceDefault from exc

    if mro_pointer is util.Uninferable or mro_type is util.Uninferable:
        # No way we could understand this.
        raise UseInferenceDefault

    super_obj = objects.Super(
        mro_pointer=mro_pointer, mro_type=mro_type, self_class=cls, scope=scope
    )
    super_obj.parent = node
    return super_obj
|
||||
|
||||
|
||||
def _infer_getattr_args(node, context):
    """Infer the object and attribute-name arguments of a getattr-style call."""
    if len(node.args) not in (2, 3):
        # Not a valid getattr call.
        raise UseInferenceDefault

    try:
        obj = next(node.args[0].infer(context=context))
        attr = next(node.args[1].infer(context=context))
    except (InferenceError, StopIteration) as exc:
        raise UseInferenceDefault from exc

    if obj is util.Uninferable or attr is util.Uninferable:
        # If one of the arguments is something we can't infer,
        # then also make the result of the getattr call something
        # which is unknown.
        return util.Uninferable, util.Uninferable

    if not (isinstance(attr, nodes.Const) and isinstance(attr.value, str)):
        # The attribute name must be a string constant.
        raise UseInferenceDefault

    return obj, attr.value
|
||||
|
||||
|
||||
def infer_getattr(node, context=None):
    """Understand getattr calls

    If one of the arguments is an Uninferable object, then the
    result will be an Uninferable object. Otherwise, the normal attribute
    lookup will be done.
    """
    obj, attr = _infer_getattr_args(node, context)
    unusable = (
        obj is util.Uninferable
        or attr is util.Uninferable
        or not hasattr(obj, "igetattr")
    )
    if unusable:
        return util.Uninferable

    try:
        return next(obj.igetattr(attr, context=context))
    except (StopIteration, InferenceError, AttributeInferenceError):
        if len(node.args) == 3:
            # Try to infer the default and return it instead.
            try:
                return next(node.args[2].infer(context=context))
            except (StopIteration, InferenceError) as exc:
                raise UseInferenceDefault from exc
        raise UseInferenceDefault
|
||||
|
||||
|
||||
def infer_hasattr(node, context=None):
    """Understand hasattr calls

    This always guarantees three possible outcomes for calling
    hasattr: Const(False) when we are sure that the object
    doesn't have the intended attribute, Const(True) when
    we know that the object has the attribute and Uninferable
    when we are unsure of the outcome of the function call.
    """
    try:
        obj, attr = _infer_getattr_args(node, context)
        cannot_tell = (
            obj is util.Uninferable
            or attr is util.Uninferable
            or not hasattr(obj, "getattr")
        )
        if cannot_tell:
            return util.Uninferable
        obj.getattr(attr, context=context)
    except UseInferenceDefault:
        # Can't infer something from this function call.
        return util.Uninferable
    except AttributeInferenceError:
        # Doesn't have it.
        return nodes.Const(False)
    return nodes.Const(True)
|
||||
|
||||
|
||||
def infer_callable(node, context=None):
    """Understand callable calls

    This follows Python's semantics, where an object
    is callable if it provides an attribute __call__,
    even though that attribute is something which can't be
    called.
    """
    if len(node.args) != 1:
        # Invalid callable call.
        raise UseInferenceDefault

    try:
        inferred = next(node.args[0].infer(context=context))
    except (InferenceError, StopIteration):
        return util.Uninferable
    if inferred is util.Uninferable:
        return util.Uninferable
    return nodes.Const(inferred.callable())
|
||||
|
||||
|
||||
def infer_property(node, context=None):
    """Understand `property` class

    This only infers the output of `property`
    call, not the arguments themselves.
    """
    if len(node.args) < 1:
        # Invalid property call.
        raise UseInferenceDefault

    try:
        inferred = next(node.args[0].infer(context=context))
    except (InferenceError, StopIteration) as exc:
        raise UseInferenceDefault from exc

    if not isinstance(inferred, (nodes.FunctionDef, nodes.Lambda)):
        # Only function-like getters make a valid property.
        raise UseInferenceDefault

    return objects.Property(
        function=inferred,
        name=inferred.name,
        doc=getattr(inferred, "doc", None),
        lineno=node.lineno,
        parent=node,
        col_offset=node.col_offset,
    )
|
||||
|
||||
|
||||
def infer_bool(node, context=None):
    """Understand bool calls."""
    if len(node.args) > 1:
        # Invalid bool call.
        raise UseInferenceDefault
    if not node.args:
        # bool() with no argument is False.
        return nodes.Const(False)

    try:
        inferred = next(node.args[0].infer(context=context))
    except (InferenceError, StopIteration):
        return util.Uninferable
    if inferred is util.Uninferable:
        return util.Uninferable

    truth = inferred.bool_value(context=context)
    if truth is util.Uninferable:
        return util.Uninferable
    return nodes.Const(truth)
|
||||
|
||||
|
||||
def infer_type(node, context=None):
    """Understand the one-argument form of *type*."""
    if len(node.args) == 1:
        return helpers.object_type(node.args[0], context)
    raise UseInferenceDefault
|
||||
|
||||
|
||||
def infer_slice(node, context=None):
    """Understand `slice` calls."""
    raw_args = node.args
    if not 0 < len(raw_args) <= 3:
        raise UseInferenceDefault

    infer_func = partial(helpers.safe_infer, context=context)
    args = [infer_func(arg) for arg in raw_args]
    for arg in args:
        # Every argument must infer to an int or None constant.
        if not arg or arg is util.Uninferable:
            raise UseInferenceDefault
        if not isinstance(arg, nodes.Const):
            raise UseInferenceDefault
        if not isinstance(arg.value, (type(None), int)):
            raise UseInferenceDefault

    # slice() always has start, stop, step; pad the missing ones with None.
    while len(args) < 3:
        args.append(None)

    slice_node = nodes.Slice(
        lineno=node.lineno, col_offset=node.col_offset, parent=node.parent
    )
    slice_node.postinit(*args)
    return slice_node
|
||||
|
||||
|
||||
def _infer_object__new__decorator(node, context=None):
    # @object.__new__ instantiates the class at decoration time, so
    # inference yields an instance rather than the class itself.
    instance = node.instantiate_class()
    return iter([instance])
|
||||
|
||||
|
||||
def _infer_object__new__decorator_check(node):
    """Predicate before inference_tip

    Check if the given ClassDef has an @object.__new__ decorator
    """
    decorators = node.decorators
    if not decorators:
        return False
    return any(
        isinstance(dec, nodes.Attribute) and dec.as_string() == OBJECT_DUNDER_NEW
        for dec in decorators.nodes
    )
|
||||
|
||||
|
||||
def infer_issubclass(callnode, context=None):
    """Infer issubclass() calls

    :param nodes.Call callnode: an `issubclass` call
    :param InferenceContext context: the context for the inference
    :rtype nodes.Const: Boolean Const value of the `issubclass` call
    :raises UseInferenceDefault: If the node cannot be inferred
    """
    call = arguments.CallSite.from_call(callnode, context=context)
    if call.keyword_arguments:
        # issubclass doesn't support keyword arguments
        raise UseInferenceDefault("TypeError: issubclass() takes no keyword arguments")
    positional = call.positional_arguments
    if len(positional) != 2:
        raise UseInferenceDefault(
            f"Expected two arguments, got {len(positional)}"
        )
    # First argument: the candidate class; second: class or tuple of classes.
    obj_node, class_or_tuple_node = positional

    try:
        obj_type = next(obj_node.infer(context=context))
    except (InferenceError, StopIteration) as exc:
        raise UseInferenceDefault from exc
    if not isinstance(obj_type, nodes.ClassDef):
        raise UseInferenceDefault("TypeError: arg 1 must be class")

    # Normalise the right-hand side to a list of classes.
    try:
        class_container = _class_or_tuple_to_container(
            class_or_tuple_node, context=context
        )
    except InferenceError as exc:
        raise UseInferenceDefault from exc

    try:
        answer = helpers.object_issubclass(obj_type, class_container, context)
    except AstroidTypeError as exc:
        raise UseInferenceDefault("TypeError: " + str(exc)) from exc
    except MroError as exc:
        raise UseInferenceDefault from exc
    return nodes.Const(answer)
|
||||
|
||||
|
||||
def infer_isinstance(callnode, context=None):
    """Infer isinstance calls

    :param nodes.Call callnode: an isinstance call
    :param InferenceContext context: context for call
        (currently unused but is a common interface for inference)
    :rtype nodes.Const: Boolean Const value of isinstance call

    :raises UseInferenceDefault: If the node cannot be inferred
    """
    call = arguments.CallSite.from_call(callnode, context=context)
    if call.keyword_arguments:
        # isinstance doesn't support keyword arguments
        raise UseInferenceDefault("TypeError: isinstance() takes no keyword arguments")
    positional = call.positional_arguments
    if len(positional) != 2:
        raise UseInferenceDefault(
            f"Expected two arguments, got {len(positional)}"
        )
    # First argument: the object under test; second: class or tuple of
    # classes it is checked against.
    obj_node, class_or_tuple_node = positional
    try:
        class_container = _class_or_tuple_to_container(
            class_or_tuple_node, context=context
        )
    except InferenceError as exc:
        raise UseInferenceDefault from exc
    try:
        answer = helpers.object_isinstance(obj_node, class_container, context)
    except AstroidTypeError as exc:
        raise UseInferenceDefault("TypeError: " + str(exc)) from exc
    except MroError as exc:
        raise UseInferenceDefault from exc
    if answer is util.Uninferable:
        raise UseInferenceDefault
    return nodes.Const(answer)
|
||||
|
||||
|
||||
def _class_or_tuple_to_container(node, context=None):
    """Normalise isinstance/issubclass's second argument to a list.

    Raises InferenceError when the node (or any tuple element) cannot be
    inferred, which simplifies the callers' logic.
    """
    try:
        inferred = next(node.infer(context=context))
    except StopIteration as e:
        raise InferenceError(node=node, context=context) from e

    # arg2 MUST be a type or a TUPLE of types for isinstance.
    if not isinstance(inferred, nodes.Tuple):
        # A single type: wrap it so callers always see a list.
        return [inferred]

    try:
        candidates = [
            next(elt.infer(context=context)) for elt in inferred.elts
        ]
    except StopIteration as e:
        raise InferenceError(node=node, context=context) from e
    return [klass for klass in candidates if klass is not None]
|
||||
|
||||
|
||||
def infer_len(node, context=None):
    """Infer length calls

    :param nodes.Call node: len call to infer
    :param context.InferenceContext: node context
    :rtype nodes.Const: a Const node with the inferred length, if possible
    """
    call = arguments.CallSite.from_call(node, context=context)
    if call.keyword_arguments:
        raise UseInferenceDefault("TypeError: len() must take no keyword arguments")
    if len(call.positional_arguments) != 1:
        raise UseInferenceDefault(
            "TypeError: len() must take exactly one argument "
            "({len}) given".format(len=len(call.positional_arguments))
        )
    argument_node = call.positional_arguments[0]

    try:
        length = helpers.object_len(argument_node, context=context)
    except (AstroidTypeError, InferenceError) as exc:
        raise UseInferenceDefault(str(exc)) from exc
    return nodes.Const(length)
|
||||
|
||||
|
||||
def infer_str(node, context=None):
    """Infer str() calls

    :param nodes.Call node: str() call to infer
    :param context.InferenceContext: node context
    :rtype nodes.Const: a Const containing an empty string
    :raises UseInferenceDefault: when the call passes keyword arguments
    """
    call = arguments.CallSite.from_call(node, context=context)
    if call.keyword_arguments:
        raise UseInferenceDefault("TypeError: str() must take no keyword arguments")
    # The concrete string value is not inferred; an empty-string Const is
    # enough to tell consumers the result is a str instance.
    # (The former try/except around this return was dead code: building a
    # Const from a literal cannot raise AstroidTypeError/InferenceError.)
    return nodes.Const("")
|
||||
|
||||
|
||||
def infer_int(node, context=None):
    """Infer int() calls

    :param nodes.Call node: int() call to infer
    :param context.InferenceContext: node context
    :rtype nodes.Const: a Const containing the integer value of the int() call
    """
    call = arguments.CallSite.from_call(node, context=context)
    if call.keyword_arguments:
        raise UseInferenceDefault("TypeError: int() must take no keyword arguments")

    if not call.positional_arguments:
        # int() with no argument is 0.
        return nodes.Const(0)

    try:
        first = next(call.positional_arguments[0].infer(context=context))
    except (InferenceError, StopIteration) as exc:
        raise UseInferenceDefault(str(exc)) from exc
    if first is util.Uninferable:
        raise UseInferenceDefault

    convertible = isinstance(first, nodes.Const) and isinstance(
        first.value, (int, str)
    )
    if convertible:
        try:
            converted = int(first.value)
        except ValueError:
            # Mirror the unconvertible case with a neutral 0 constant.
            return nodes.Const(0)
        return nodes.Const(converted)

    return nodes.Const(0)
|
||||
|
||||
|
||||
def infer_dict_fromkeys(node, context=None):
    """Infer dict.fromkeys

    :param nodes.Call node: dict.fromkeys() call to infer
    :param context.InferenceContext context: node context
    :rtype nodes.Dict:
        a Dictionary containing the values that astroid was able to infer.
        In case the inference failed for any reason, an empty dictionary
        will be inferred instead.
    """

    def _build_dict_with_elements(elements):
        # Wrap inferred (key, value) pairs in a Dict node anchored at the
        # call site.
        new_node = nodes.Dict(
            col_offset=node.col_offset, lineno=node.lineno, parent=node.parent
        )
        new_node.postinit(elements)
        return new_node

    call = arguments.CallSite.from_call(node, context=context)
    if call.keyword_arguments:
        # Bug fix: this message previously said "int()" (copy-paste from
        # infer_int); name the actual builtin being inferred.
        raise UseInferenceDefault(
            "TypeError: dict.fromkeys() must take no keyword arguments"
        )
    if len(call.positional_arguments) not in {1, 2}:
        raise UseInferenceDefault(
            "TypeError: Needs between 1 and 2 positional arguments"
        )

    default = nodes.Const(None)
    values = call.positional_arguments[0]
    try:
        inferred_values = next(values.infer(context=context))
    except (InferenceError, StopIteration):
        return _build_dict_with_elements([])
    if inferred_values is util.Uninferable:
        return _build_dict_with_elements([])

    # Limit to a couple of potential values, as this can become pretty complicated
    accepted_iterable_elements = (nodes.Const,)
    if isinstance(inferred_values, (nodes.List, nodes.Set, nodes.Tuple)):
        elements = inferred_values.elts
        for element in elements:
            if not isinstance(element, accepted_iterable_elements):
                # Fallback to an empty dict
                return _build_dict_with_elements([])

        elements_with_value = [(element, default) for element in elements]
        return _build_dict_with_elements(elements_with_value)
    if isinstance(inferred_values, nodes.Const) and isinstance(
        inferred_values.value, (str, bytes)
    ):
        # Each character (or byte) of the constant becomes a key.
        elements = [
            (nodes.Const(element), default) for element in inferred_values.value
        ]
        return _build_dict_with_elements(elements)
    if isinstance(inferred_values, nodes.Dict):
        keys = inferred_values.itered()
        for key in keys:
            if not isinstance(key, accepted_iterable_elements):
                # Fallback to an empty dict
                return _build_dict_with_elements([])

        elements_with_value = [(element, default) for element in keys]
        return _build_dict_with_elements(elements_with_value)

    # Fallback to an empty dictionary
    return _build_dict_with_elements([])
|
||||
|
||||
|
||||
# Builtins inference
# Hook each builtin name up to its dedicated inference function so that
# calls such as bool(x) or len(x) resolve to concrete nodes.
register_builtin_transform(infer_bool, "bool")
register_builtin_transform(infer_super, "super")
register_builtin_transform(infer_callable, "callable")
register_builtin_transform(infer_property, "property")
register_builtin_transform(infer_getattr, "getattr")
register_builtin_transform(infer_hasattr, "hasattr")
register_builtin_transform(infer_tuple, "tuple")
register_builtin_transform(infer_set, "set")
register_builtin_transform(infer_list, "list")
register_builtin_transform(infer_dict, "dict")
register_builtin_transform(infer_frozenset, "frozenset")
register_builtin_transform(infer_type, "type")
register_builtin_transform(infer_slice, "slice")
register_builtin_transform(infer_isinstance, "isinstance")
register_builtin_transform(infer_issubclass, "issubclass")
register_builtin_transform(infer_len, "len")
register_builtin_transform(infer_str, "str")
register_builtin_transform(infer_int, "int")
register_builtin_transform(infer_dict_fromkeys, "dict.fromkeys")


# Infer object.__new__ calls
# Classes decorated with @object.__new__ are replaced with an instance
# at inference time (see _infer_object__new__decorator).
AstroidManager().register_transform(
    nodes.ClassDef,
    inference_tip(_infer_object__new__decorator),
    _infer_object__new__decorator_check,
)
|
|
@ -1,132 +0,0 @@
|
|||
# Copyright (c) 2016, 2018, 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016-2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2017 Derek Gustafson <degustaf@gmail.com>
|
||||
# Copyright (c) 2018 Ioana Tagirta <ioana.tagirta@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 John Belmonte <john@neggie.net>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import extract_node, parse
|
||||
from astroid.const import PY39_PLUS
|
||||
from astroid.exceptions import AttributeInferenceError
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.nodes.scoped_nodes import ClassDef
|
||||
|
||||
|
||||
def _collections_transform():
    """Build the mock source for the ``collections`` module.

    Combines a ``defaultdict`` stub with the deque and OrderedDict mocks
    produced by the helper functions below.
    """
    defaultdict_src = """
    class defaultdict(dict):
        default_factory = None
        def __missing__(self, key): pass
        def __getitem__(self, key): return default_factory

    """
    return parse(defaultdict_src + _deque_mock() + _ordered_dict_mock())
|
||||
|
||||
|
||||
def _deque_mock():
    """Return mock source for ``collections.deque``.

    The real deque is implemented in C, so astroid cannot introspect it;
    this pure-Python stand-in exposes the same API surface.
    """
    base_deque_class = """
    class deque(object):
        maxlen = 0
        def __init__(self, iterable=None, maxlen=None):
            self.iterable = iterable or []
        def append(self, x): pass
        def appendleft(self, x): pass
        def clear(self): pass
        def count(self, x): return 0
        def extend(self, iterable): pass
        def extendleft(self, iterable): pass
        def pop(self): return self.iterable[0]
        def popleft(self): return self.iterable[0]
        def remove(self, value): pass
        def reverse(self): return reversed(self.iterable)
        def rotate(self, n=1): return self
        def __iter__(self): return self
        def __reversed__(self): return self.iterable[::-1]
        def __getitem__(self, index): return self.iterable[index]
        def __setitem__(self, index, value): pass
        def __delitem__(self, index): pass
        def __bool__(self): return bool(self.iterable)
        def __nonzero__(self): return bool(self.iterable)
        def __contains__(self, o): return o in self.iterable
        def __len__(self): return len(self.iterable)
        def __copy__(self): return deque(self.iterable)
        def copy(self): return deque(self.iterable)
        def index(self, x, start=0, end=0): return 0
        def insert(self, i, x): pass
        def __add__(self, other): pass
        def __iadd__(self, other): pass
        def __mul__(self, other): pass
        def __imul__(self, other): pass
        def __rmul__(self, other): pass"""
    if PY39_PLUS:
        # Bug fix: the first parameter of a classmethod is the class;
        # the previous mock used `self` while returning `cls`, which is
        # an undefined name (compare _ordered_dict_mock below).
        base_deque_class += """
        @classmethod
        def __class_getitem__(cls, item): return cls"""
    return base_deque_class
|
||||
|
||||
|
||||
def _ordered_dict_mock():
    """Return mock source for ``collections.OrderedDict``."""
    source = """
    class OrderedDict(dict):
        def __reversed__(self): return self[::-1]
        def move_to_end(self, key, last=False): pass"""
    if PY39_PLUS:
        # OrderedDict became subscriptable in 3.9.
        source += """
        @classmethod
        def __class_getitem__(cls, item): return cls"""
    return source
|
||||
|
||||
|
||||
# Extend the C-implemented collections module with the mocks above.
register_module_extender(AstroidManager(), "collections", _collections_transform)
|
||||
|
||||
|
||||
def _looks_like_subscriptable(node: ClassDef) -> bool:
    """
    Returns True if the node corresponds to a ClassDef of the Collections.abc module that
    supports subscripting

    :param node: ClassDef node
    """
    qualified = node.qname()
    if not qualified.startswith(("_collections", "collections")):
        return False
    try:
        node.getattr("__class_getitem__")
    except AttributeInferenceError:
        return False
    return True
|
||||
|
||||
|
||||
CLASS_GET_ITEM_TEMPLATE = """
|
||||
@classmethod
|
||||
def __class_getitem__(cls, item):
|
||||
return cls
|
||||
"""
|
||||
|
||||
|
||||
def easy_class_getitem_inference(node, context=None):
    """Inject a trivial ``__class_getitem__`` into *node*'s locals.

    The genuine implementation in _collections_abc is too convoluted for
    inference, so a simple stand-in is registered instead.
    """
    node.locals["__class_getitem__"] = [extract_node(CLASS_GET_ITEM_TEMPLATE)]
|
||||
|
||||
|
||||
if PY39_PLUS:
    # Starting with Python39 some objects of the collection module are subscriptable
    # thanks to the __class_getitem__ method but the way it is implemented in
    # _collection_abc makes it difficult to infer. (We would have to handle AssignName inference in the
    # getitem method of the ClassDef class) Instead we put here a mock of the __class_getitem__ method
    AstroidManager().register_transform(
        ClassDef, easy_class_getitem_inference, _looks_like_subscriptable
    )
|
|
@ -1,26 +0,0 @@
|
|||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import parse
|
||||
from astroid.const import PY37_PLUS
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
if PY37_PLUS:
    # Since Python 3.7 Hashing Methods are added
    # dynamically to globals()

    def _re_transform():
        """Mock the crypt module's dynamically-created _Method constants."""
        # Bug fix: METHOD_BLOWFISH previously passed its fields shifted
        # by one -- _Method('BLOWFISH', 2, 'b', 22) -- which does not
        # match the declared namedtuple fields.  CPython's crypt module
        # defines ident='2b', salt_chars=22, total_size=59 + 22.
        return parse(
            """
        from collections import namedtuple
        _Method = namedtuple('_Method', 'name ident salt_chars total_size')

        METHOD_SHA512 = _Method('SHA512', '6', 16, 106)
        METHOD_SHA256 = _Method('SHA256', '5', 16, 63)
        METHOD_BLOWFISH = _Method('BLOWFISH', '2b', 22, 59 + 22)
        METHOD_MD5 = _Method('MD5', '1', 8, 34)
        METHOD_CRYPT = _Method('CRYPT', None, 2, 13)
        """
        )

    register_module_extender(AstroidManager(), "crypt", _re_transform)
|
|
@ -1,78 +0,0 @@
|
|||
"""
|
||||
Astroid hooks for ctypes module.
|
||||
|
||||
Inside the ctypes module, the value class is defined inside
|
||||
the C coded module _ctypes.
|
||||
Thus astroid doesn't know that the value member is a builtin type
|
||||
among float, int, bytes or str.
|
||||
"""
|
||||
import sys
|
||||
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import parse
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def enrich_ctypes_redefined_types():
    """
    For each ctypes redefined types, overload 'value' and '_type_' members definition.
    Overloading 'value' is mandatory otherwise astroid cannot infer the correct type for it.
    Overloading '_type_' is necessary because the class definition made here replaces the original
    one, in which '_type_' member is defined. Luckily those original class definitions are very short
    and contain only the '_type_' member definition.
    """
    # (ctypes class name, python builtin it wraps, struct-style type code)
    c_class_to_type = (
        ("c_byte", "int", "b"),
        ("c_char", "bytes", "c"),
        ("c_double", "float", "d"),
        ("c_float", "float", "f"),
        ("c_int", "int", "i"),
        ("c_int16", "int", "h"),
        ("c_int32", "int", "i"),
        ("c_int64", "int", "l"),
        ("c_int8", "int", "b"),
        ("c_long", "int", "l"),
        ("c_longdouble", "float", "g"),
        ("c_longlong", "int", "l"),
        ("c_short", "int", "h"),
        ("c_size_t", "int", "L"),
        ("c_ssize_t", "int", "l"),
        ("c_ubyte", "int", "B"),
        ("c_uint", "int", "I"),
        ("c_uint16", "int", "H"),
        ("c_uint32", "int", "I"),
        ("c_uint64", "int", "L"),
        ("c_uint8", "int", "B"),
        ("c_ulong", "int", "L"),
        ("c_ulonglong", "int", "L"),
        ("c_ushort", "int", "H"),
        ("c_wchar", "str", "u"),
    )

    # c_bool is special-cased: its value is always a bool literal.
    pieces = [
        """
from _ctypes import _SimpleCData

class c_bool(_SimpleCData):
    def __init__(self, value):
        self.value = True
        self._type_ = '?'
"""
    ]
    pieces.extend(
        f"""
class {c_type}(_SimpleCData):
    def __init__(self, value):
        self.value = {builtin_type}(value)
        self._type_ = '{type_code}'
"""
        for c_type, builtin_type, type_code in c_class_to_type
    )

    return parse("\n".join(pieces))
|
||||
|
||||
|
||||
# CPython keeps the basic ctypes types in the C module _ctypes, hiding
# them from astroid; register the mock only there.
if not hasattr(sys, "pypy_version_info"):
    # No need of this module in pypy where everything is written in python
    register_module_extender(AstroidManager(), "ctypes", enrich_ctypes_redefined_types)
|
|
@ -1,181 +0,0 @@
|
|||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import parse
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def _curses_transform():
    """Return a mock ``curses`` module.

    The real module creates these attribute/key/ACS/color constants in C
    at import time, so astroid cannot see them; expose them all with a
    dummy value of 1 (only the names matter for inference).
    """
    return parse(
        """
        A_ALTCHARSET = 1
        A_BLINK = 1
        A_BOLD = 1
        A_DIM = 1
        A_INVIS = 1
        A_ITALIC = 1
        A_NORMAL = 1
        A_PROTECT = 1
        A_REVERSE = 1
        A_STANDOUT = 1
        A_UNDERLINE = 1
        A_HORIZONTAL = 1
        A_LEFT = 1
        A_LOW = 1
        A_RIGHT = 1
        A_TOP = 1
        A_VERTICAL = 1
        A_CHARTEXT = 1
        A_ATTRIBUTES = 1
        A_CHARTEXT = 1
        A_COLOR = 1
        KEY_MIN = 1
        KEY_BREAK = 1
        KEY_DOWN = 1
        KEY_UP = 1
        KEY_LEFT = 1
        KEY_RIGHT = 1
        KEY_HOME = 1
        KEY_BACKSPACE = 1
        KEY_F0 = 1
        KEY_Fn = 1
        KEY_DL = 1
        KEY_IL = 1
        KEY_DC = 1
        KEY_IC = 1
        KEY_EIC = 1
        KEY_CLEAR = 1
        KEY_EOS = 1
        KEY_EOL = 1
        KEY_SF = 1
        KEY_SR = 1
        KEY_NPAGE = 1
        KEY_PPAGE = 1
        KEY_STAB = 1
        KEY_CTAB = 1
        KEY_CATAB = 1
        KEY_ENTER = 1
        KEY_SRESET = 1
        KEY_RESET = 1
        KEY_PRINT = 1
        KEY_LL = 1
        KEY_A1 = 1
        KEY_A3 = 1
        KEY_B2 = 1
        KEY_C1 = 1
        KEY_C3 = 1
        KEY_BTAB = 1
        KEY_BEG = 1
        KEY_CANCEL = 1
        KEY_CLOSE = 1
        KEY_COMMAND = 1
        KEY_COPY = 1
        KEY_CREATE = 1
        KEY_END = 1
        KEY_EXIT = 1
        KEY_FIND = 1
        KEY_HELP = 1
        KEY_MARK = 1
        KEY_MESSAGE = 1
        KEY_MOVE = 1
        KEY_NEXT = 1
        KEY_OPEN = 1
        KEY_OPTIONS = 1
        KEY_PREVIOUS = 1
        KEY_REDO = 1
        KEY_REFERENCE = 1
        KEY_REFRESH = 1
        KEY_REPLACE = 1
        KEY_RESTART = 1
        KEY_RESUME = 1
        KEY_SAVE = 1
        KEY_SBEG = 1
        KEY_SCANCEL = 1
        KEY_SCOMMAND = 1
        KEY_SCOPY = 1
        KEY_SCREATE = 1
        KEY_SDC = 1
        KEY_SDL = 1
        KEY_SELECT = 1
        KEY_SEND = 1
        KEY_SEOL = 1
        KEY_SEXIT = 1
        KEY_SFIND = 1
        KEY_SHELP = 1
        KEY_SHOME = 1
        KEY_SIC = 1
        KEY_SLEFT = 1
        KEY_SMESSAGE = 1
        KEY_SMOVE = 1
        KEY_SNEXT = 1
        KEY_SOPTIONS = 1
        KEY_SPREVIOUS = 1
        KEY_SPRINT = 1
        KEY_SREDO = 1
        KEY_SREPLACE = 1
        KEY_SRIGHT = 1
        KEY_SRSUME = 1
        KEY_SSAVE = 1
        KEY_SSUSPEND = 1
        KEY_SUNDO = 1
        KEY_SUSPEND = 1
        KEY_UNDO = 1
        KEY_MOUSE = 1
        KEY_RESIZE = 1
        KEY_MAX = 1
        ACS_BBSS = 1
        ACS_BLOCK = 1
        ACS_BOARD = 1
        ACS_BSBS = 1
        ACS_BSSB = 1
        ACS_BSSS = 1
        ACS_BTEE = 1
        ACS_BULLET = 1
        ACS_CKBOARD = 1
        ACS_DARROW = 1
        ACS_DEGREE = 1
        ACS_DIAMOND = 1
        ACS_GEQUAL = 1
        ACS_HLINE = 1
        ACS_LANTERN = 1
        ACS_LARROW = 1
        ACS_LEQUAL = 1
        ACS_LLCORNER = 1
        ACS_LRCORNER = 1
        ACS_LTEE = 1
        ACS_NEQUAL = 1
        ACS_PI = 1
        ACS_PLMINUS = 1
        ACS_PLUS = 1
        ACS_RARROW = 1
        ACS_RTEE = 1
        ACS_S1 = 1
        ACS_S3 = 1
        ACS_S7 = 1
        ACS_S9 = 1
        ACS_SBBS = 1
        ACS_SBSB = 1
        ACS_SBSS = 1
        ACS_SSBB = 1
        ACS_SSBS = 1
        ACS_SSSB = 1
        ACS_SSSS = 1
        ACS_STERLING = 1
        ACS_TTEE = 1
        ACS_UARROW = 1
        ACS_ULCORNER = 1
        ACS_URCORNER = 1
        ACS_VLINE = 1
        COLOR_BLACK = 1
        COLOR_BLUE = 1
        COLOR_CYAN = 1
        COLOR_GREEN = 1
        COLOR_MAGENTA = 1
        COLOR_RED = 1
        COLOR_WHITE = 1
        COLOR_YELLOW = 1
        """
    )
|
||||
|
||||
|
||||
# Extend the C-implemented curses module with the constants mocked above.
register_module_extender(AstroidManager(), "curses", _curses_transform)
|
|
@ -1,464 +0,0 @@
|
|||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
"""
|
||||
Astroid hook for the dataclasses library
|
||||
|
||||
Support built-in dataclasses, pydantic.dataclasses, and marshmallow_dataclass-annotated
|
||||
dataclasses. References:
|
||||
- https://docs.python.org/3/library/dataclasses.html
|
||||
- https://pydantic-docs.helpmanual.io/usage/dataclasses/
|
||||
- https://lovasoa.github.io/marshmallow_dataclass/
|
||||
|
||||
"""
|
||||
import sys
|
||||
from typing import FrozenSet, Generator, List, Optional, Tuple, Union
|
||||
|
||||
from astroid import context, inference_tip
|
||||
from astroid.builder import parse
|
||||
from astroid.const import PY37_PLUS, PY39_PLUS
|
||||
from astroid.exceptions import (
|
||||
AstroidSyntaxError,
|
||||
InferenceError,
|
||||
MroError,
|
||||
UseInferenceDefault,
|
||||
)
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.nodes.node_classes import (
|
||||
AnnAssign,
|
||||
Assign,
|
||||
AssignName,
|
||||
Attribute,
|
||||
Call,
|
||||
Name,
|
||||
NodeNG,
|
||||
Subscript,
|
||||
Unknown,
|
||||
)
|
||||
from astroid.nodes.scoped_nodes import ClassDef, FunctionDef
|
||||
from astroid.util import Uninferable
|
||||
|
||||
if sys.version_info >= (3, 8):
|
||||
from typing import Literal
|
||||
else:
|
||||
from typing_extensions import Literal
|
||||
|
||||
# Return shape of _get_field_default: nothing, a plain default node, or
# a default_factory call.
_FieldDefaultReturn = Union[
    None, Tuple[Literal["default"], NodeNG], Tuple[Literal["default_factory"], Call]
]

# Decorator names recognised as dataclass decorators.
DATACLASSES_DECORATORS = frozenset(("dataclass",))
FIELD_NAME = "field"
# Modules whose `dataclass`/`field` definitions are supported.
DATACLASS_MODULES = frozenset(
    ("dataclasses", "marshmallow_dataclass", "pydantic.dataclasses")
)
DEFAULT_FACTORY = "_HAS_DEFAULT_FACTORY"  # based on typing.py
|
||||
|
||||
|
||||
def is_decorated_with_dataclass(node, decorator_names=DATACLASSES_DECORATORS):
    """Return True if a decorated node has a `dataclass` decorator applied."""
    if not isinstance(node, ClassDef):
        return False
    decorators = node.decorators
    if not decorators:
        return False
    for candidate in decorators.nodes:
        if _looks_like_dataclass_decorator(candidate, decorator_names):
            return True
    return False
|
||||
|
||||
|
||||
def dataclass_transform(node: ClassDef) -> None:
    """Rewrite a dataclass to be easily understood by pylint"""

    # Register every dataclass attribute as an instance attribute whose
    # value is an Unknown node (inferred later via inference tips).
    for assign_node in _get_dataclass_attributes(node):
        name = assign_node.target.name

        rhs_node = Unknown(
            lineno=assign_node.lineno,
            col_offset=assign_node.col_offset,
            parent=assign_node,
        )
        rhs_node = AstroidManager().visit_transforms(rhs_node)
        node.instance_attrs[name] = [rhs_node]

    if not _check_generate_dataclass_init(node):
        return

    # Walk the MRO base-first so derived classes override inherited
    # fields while preserving the base classes' field order.
    try:
        reversed_mro = list(reversed(node.mro()))
    except MroError:
        reversed_mro = [node]

    field_assigns = {}
    field_order = []
    for klass in (k for k in reversed_mro if is_decorated_with_dataclass(k)):
        for assign_node in _get_dataclass_attributes(klass, init=True):
            name = assign_node.target.name
            if name not in field_assigns:
                field_order.append(name)
            field_assigns[name] = assign_node

    # Synthesise an __init__ from the collected fields and graft it in.
    init_str = _generate_dataclass_init([field_assigns[name] for name in field_order])
    try:
        init_node = parse(init_str)["__init__"]
    except AstroidSyntaxError:
        pass
    else:
        init_node.parent = node
        init_node.lineno, init_node.col_offset = None, None
        node.locals["__init__"] = [init_node]

        # The generated __init__ may reference the DEFAULT_FACTORY
        # sentinel; make sure it exists once at module level.
        root = node.root()
        if DEFAULT_FACTORY not in root.locals:
            new_assign = parse(f"{DEFAULT_FACTORY} = object()").body[0]
            new_assign.parent = root
            root.locals[DEFAULT_FACTORY] = [new_assign.targets[0]]
|
||||
|
||||
|
||||
def _get_dataclass_attributes(node: ClassDef, init: bool = False) -> Generator:
    """Yield the AnnAssign nodes of dataclass attributes for the node.

    If init is True, also include InitVars, but exclude attributes from calls to
    field where init=False.
    """
    for assign_node in node.body:
        # Only annotated assignments to plain names are dataclass fields.
        if not isinstance(assign_node, AnnAssign) or not isinstance(
            assign_node.target, AssignName
        ):
            continue

        # ClassVar annotations are class attributes, not fields.
        if _is_class_var(assign_node.annotation):  # type: ignore[arg-type] # annotation is never None
            continue

        if init:
            # Skip fields declared with field(..., init=False).
            value = assign_node.value
            if (
                isinstance(value, Call)
                and _looks_like_dataclass_field_call(value, check_scope=False)
                and any(
                    keyword.arg == "init"
                    and not keyword.value.bool_value()  # type: ignore[union-attr] # value is never None
                    for keyword in value.keywords
                )
            ):
                continue
        elif _is_init_var(assign_node.annotation):  # type: ignore[arg-type] # annotation is never None
            # InitVars are only relevant when collecting __init__ params.
            continue

        yield assign_node
|
||||
|
||||
|
||||
def _check_generate_dataclass_init(node: ClassDef) -> bool:
    """Return True if we should generate an __init__ method for node.

    This is True when:
        - node doesn't define its own __init__ method
        - the dataclass decorator was called *without* the keyword argument init=False
    """
    if "__init__" in node.locals:
        return False

    found = None

    for decorator_attribute in node.decorators.nodes:
        if not isinstance(decorator_attribute, Call):
            continue

        if _looks_like_dataclass_decorator(decorator_attribute):
            found = decorator_attribute

    if found is None:
        return True

    # Check for keyword arguments of the form init=False.
    # Bug fix: the previous `all(keyword.arg != "init" and
    # keyword.value.bool_value())` wrongly returned False for any falsy
    # keyword (e.g. eq=False) and for an explicit init=True; only an
    # explicit init=False should suppress generation.
    return not any(
        keyword.arg == "init"
        and not keyword.value.bool_value()  # type: ignore[union-attr] # value is never None
        for keyword in found.keywords
    )
|
||||
|
||||
|
||||
def _generate_dataclass_init(assigns: List[AnnAssign]) -> str:
    """Return an init method for a dataclass given the targets."""
    target_names = []
    params = []
    assignments = []

    for assign in assigns:
        name, annotation, value = assign.target.name, assign.annotation, assign.value
        target_names.append(name)

        if _is_init_var(annotation):  # type: ignore[arg-type] # annotation is never None
            # InitVar fields become parameters but never self attributes.
            init_var = True
            if isinstance(annotation, Subscript):
                annotation = annotation.slice
            else:
                # Cannot determine type annotation for parameter from InitVar
                annotation = None
            assignment_str = ""
        else:
            init_var = False
            assignment_str = f"self.{name} = {name}"

        if annotation:
            param_str = f"{name}: {annotation.as_string()}"
        else:
            param_str = name

        if value:
            if isinstance(value, Call) and _looks_like_dataclass_field_call(
                value, check_scope=False
            ):
                # field(...) call: extract its default / default_factory.
                result = _get_field_default(value)
                if result:
                    default_type, default_node = result
                    if default_type == "default":
                        param_str += f" = {default_node.as_string()}"
                    elif default_type == "default_factory":
                        # The sentinel defers the factory call to the body,
                        # mirroring how dataclasses handles default_factory.
                        param_str += f" = {DEFAULT_FACTORY}"
                        assignment_str = (
                            f"self.{name} = {default_node.as_string()} "
                            f"if {name} is {DEFAULT_FACTORY} else {name}"
                        )
            else:
                param_str += f" = {value.as_string()}"

        params.append(param_str)
        if not init_var:
            assignments.append(assignment_str)

    params_string = ", ".join(["self"] + params)
    assignments_string = "\n    ".join(assignments) if assignments else "pass"
    return f"def __init__({params_string}) -> None:\n    {assignments_string}"
|
||||
|
||||
|
||||
def infer_dataclass_attribute(
|
||||
node: Unknown, ctx: Optional[context.InferenceContext] = None
|
||||
) -> Generator:
|
||||
"""Inference tip for an Unknown node that was dynamically generated to
|
||||
represent a dataclass attribute.
|
||||
|
||||
In the case that a default value is provided, that is inferred first.
|
||||
Then, an Instance of the annotated class is yielded.
|
||||
"""
|
||||
assign = node.parent
|
||||
if not isinstance(assign, AnnAssign):
|
||||
yield Uninferable
|
||||
return
|
||||
|
||||
annotation, value = assign.annotation, assign.value
|
||||
if value is not None:
|
||||
yield from value.infer(context=ctx)
|
||||
if annotation is not None:
|
||||
yield from _infer_instance_from_annotation(annotation, ctx=ctx)
|
||||
else:
|
||||
yield Uninferable
|
||||
|
||||
|
||||
def infer_dataclass_field_call(
|
||||
node: Call, ctx: Optional[context.InferenceContext] = None
|
||||
) -> Generator:
|
||||
"""Inference tip for dataclass field calls."""
|
||||
if not isinstance(node.parent, (AnnAssign, Assign)):
|
||||
raise UseInferenceDefault
|
||||
result = _get_field_default(node)
|
||||
if not result:
|
||||
yield Uninferable
|
||||
else:
|
||||
default_type, default = result
|
||||
if default_type == "default":
|
||||
yield from default.infer(context=ctx)
|
||||
else:
|
||||
new_call = parse(default.as_string()).body[0].value
|
||||
new_call.parent = node.parent
|
||||
yield from new_call.infer(context=ctx)
|
||||
|
||||
|
||||
def _looks_like_dataclass_decorator(
|
||||
node: NodeNG, decorator_names: FrozenSet[str] = DATACLASSES_DECORATORS
|
||||
) -> bool:
|
||||
"""Return True if node looks like a dataclass decorator.
|
||||
|
||||
Uses inference to lookup the value of the node, and if that fails,
|
||||
matches against specific names.
|
||||
"""
|
||||
if isinstance(node, Call): # decorator with arguments
|
||||
node = node.func
|
||||
try:
|
||||
inferred = next(node.infer())
|
||||
except (InferenceError, StopIteration):
|
||||
inferred = Uninferable
|
||||
|
||||
if inferred is Uninferable:
|
||||
if isinstance(node, Name):
|
||||
return node.name in decorator_names
|
||||
if isinstance(node, Attribute):
|
||||
return node.attrname in decorator_names
|
||||
|
||||
return False
|
||||
|
||||
return (
|
||||
isinstance(inferred, FunctionDef)
|
||||
and inferred.name in decorator_names
|
||||
and inferred.root().name in DATACLASS_MODULES
|
||||
)
|
||||
|
||||
|
||||
def _looks_like_dataclass_attribute(node: Unknown) -> bool:
|
||||
"""Return True if node was dynamically generated as the child of an AnnAssign
|
||||
statement.
|
||||
"""
|
||||
parent = node.parent
|
||||
if not parent:
|
||||
return False
|
||||
|
||||
scope = parent.scope()
|
||||
return (
|
||||
isinstance(parent, AnnAssign)
|
||||
and isinstance(scope, ClassDef)
|
||||
and is_decorated_with_dataclass(scope)
|
||||
)
|
||||
|
||||
|
||||
def _looks_like_dataclass_field_call(node: Call, check_scope: bool = True) -> bool:
|
||||
"""Return True if node is calling dataclasses field or Field
|
||||
from an AnnAssign statement directly in the body of a ClassDef.
|
||||
|
||||
If check_scope is False, skips checking the statement and body.
|
||||
"""
|
||||
if check_scope:
|
||||
stmt = node.statement(future=True)
|
||||
scope = stmt.scope()
|
||||
if not (
|
||||
isinstance(stmt, AnnAssign)
|
||||
and stmt.value is not None
|
||||
and isinstance(scope, ClassDef)
|
||||
and is_decorated_with_dataclass(scope)
|
||||
):
|
||||
return False
|
||||
|
||||
try:
|
||||
inferred = next(node.func.infer())
|
||||
except (InferenceError, StopIteration):
|
||||
return False
|
||||
|
||||
if not isinstance(inferred, FunctionDef):
|
||||
return False
|
||||
|
||||
return inferred.name == FIELD_NAME and inferred.root().name in DATACLASS_MODULES
|
||||
|
||||
|
||||
def _get_field_default(field_call: Call) -> _FieldDefaultReturn:
|
||||
"""Return a the default value of a field call, and the corresponding keyword argument name.
|
||||
|
||||
field(default=...) results in the ... node
|
||||
field(default_factory=...) results in a Call node with func ... and no arguments
|
||||
|
||||
If neither or both arguments are present, return ("", None) instead,
|
||||
indicating that there is not a valid default value.
|
||||
"""
|
||||
default, default_factory = None, None
|
||||
for keyword in field_call.keywords:
|
||||
if keyword.arg == "default":
|
||||
default = keyword.value
|
||||
elif keyword.arg == "default_factory":
|
||||
default_factory = keyword.value
|
||||
|
||||
if default is not None and default_factory is None:
|
||||
return "default", default
|
||||
|
||||
if default is None and default_factory is not None:
|
||||
new_call = Call(
|
||||
lineno=field_call.lineno,
|
||||
col_offset=field_call.col_offset,
|
||||
parent=field_call.parent,
|
||||
)
|
||||
new_call.postinit(func=default_factory)
|
||||
return "default_factory", new_call
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def _is_class_var(node: NodeNG) -> bool:
|
||||
"""Return True if node is a ClassVar, with or without subscripting."""
|
||||
if PY39_PLUS:
|
||||
try:
|
||||
inferred = next(node.infer())
|
||||
except (InferenceError, StopIteration):
|
||||
return False
|
||||
|
||||
return getattr(inferred, "name", "") == "ClassVar"
|
||||
|
||||
# Before Python 3.9, inference returns typing._SpecialForm instead of ClassVar.
|
||||
# Our backup is to inspect the node's structure.
|
||||
return isinstance(node, Subscript) and (
|
||||
isinstance(node.value, Name)
|
||||
and node.value.name == "ClassVar"
|
||||
or isinstance(node.value, Attribute)
|
||||
and node.value.attrname == "ClassVar"
|
||||
)
|
||||
|
||||
|
||||
def _is_init_var(node: NodeNG) -> bool:
|
||||
"""Return True if node is an InitVar, with or without subscripting."""
|
||||
try:
|
||||
inferred = next(node.infer())
|
||||
except (InferenceError, StopIteration):
|
||||
return False
|
||||
|
||||
return getattr(inferred, "name", "") == "InitVar"
|
||||
|
||||
|
||||
# Allowed typing classes for which we support inferring instances
|
||||
_INFERABLE_TYPING_TYPES = frozenset(
|
||||
(
|
||||
"Dict",
|
||||
"FrozenSet",
|
||||
"List",
|
||||
"Set",
|
||||
"Tuple",
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
def _infer_instance_from_annotation(
|
||||
node: NodeNG, ctx: Optional[context.InferenceContext] = None
|
||||
) -> Generator:
|
||||
"""Infer an instance corresponding to the type annotation represented by node.
|
||||
|
||||
Currently has limited support for the typing module.
|
||||
"""
|
||||
klass = None
|
||||
try:
|
||||
klass = next(node.infer(context=ctx))
|
||||
except (InferenceError, StopIteration):
|
||||
yield Uninferable
|
||||
if not isinstance(klass, ClassDef):
|
||||
yield Uninferable
|
||||
elif klass.root().name in {
|
||||
"typing",
|
||||
"_collections_abc",
|
||||
"",
|
||||
}: # "" because of synthetic nodes in brain_typing.py
|
||||
if klass.name in _INFERABLE_TYPING_TYPES:
|
||||
yield klass.instantiate_class()
|
||||
else:
|
||||
yield Uninferable
|
||||
else:
|
||||
yield klass.instantiate_class()
|
||||
|
||||
|
||||
if PY37_PLUS:
|
||||
AstroidManager().register_transform(
|
||||
ClassDef, dataclass_transform, is_decorated_with_dataclass
|
||||
)
|
||||
|
||||
AstroidManager().register_transform(
|
||||
Call,
|
||||
inference_tip(infer_dataclass_field_call, raise_on_overwrite=True),
|
||||
_looks_like_dataclass_field_call,
|
||||
)
|
||||
|
||||
AstroidManager().register_transform(
|
||||
Unknown,
|
||||
inference_tip(infer_dataclass_attribute, raise_on_overwrite=True),
|
||||
_looks_like_dataclass_attribute,
|
||||
)
|
|
@ -1,32 +0,0 @@
|
|||
# Copyright (c) 2015-2016, 2018, 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015 raylu <lurayl@gmail.com>
|
||||
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""Astroid hooks for dateutil"""
|
||||
|
||||
import textwrap
|
||||
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def dateutil_transform():
|
||||
return AstroidBuilder(AstroidManager()).string_build(
|
||||
textwrap.dedent(
|
||||
"""
|
||||
import datetime
|
||||
def parse(timestr, parserinfo=None, **kwargs):
|
||||
return datetime.datetime()
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
||||
|
||||
register_module_extender(AstroidManager(), "dateutil.parser", dateutil_transform)
|
|
@ -1,52 +0,0 @@
|
|||
# Copyright (c) 2017-2018, 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2020 Karthikeyan Singaravelan <tir.karthi@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
import collections.abc
|
||||
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.nodes.node_classes import FormattedValue
|
||||
|
||||
|
||||
def _clone_node_with_lineno(node, parent, lineno):
|
||||
cls = node.__class__
|
||||
other_fields = node._other_fields
|
||||
_astroid_fields = node._astroid_fields
|
||||
init_params = {"lineno": lineno, "col_offset": node.col_offset, "parent": parent}
|
||||
postinit_params = {param: getattr(node, param) for param in _astroid_fields}
|
||||
if other_fields:
|
||||
init_params.update({param: getattr(node, param) for param in other_fields})
|
||||
new_node = cls(**init_params)
|
||||
if hasattr(node, "postinit") and _astroid_fields:
|
||||
for param, child in postinit_params.items():
|
||||
if child and not isinstance(child, collections.abc.Sequence):
|
||||
cloned_child = _clone_node_with_lineno(
|
||||
node=child, lineno=new_node.lineno, parent=new_node
|
||||
)
|
||||
postinit_params[param] = cloned_child
|
||||
new_node.postinit(**postinit_params)
|
||||
return new_node
|
||||
|
||||
|
||||
def _transform_formatted_value(node): # pylint: disable=inconsistent-return-statements
|
||||
if node.value and node.value.lineno == 1:
|
||||
if node.lineno != node.value.lineno:
|
||||
new_node = FormattedValue(
|
||||
lineno=node.lineno, col_offset=node.col_offset, parent=node.parent
|
||||
)
|
||||
new_value = _clone_node_with_lineno(
|
||||
node=node.value, lineno=node.lineno, parent=new_node
|
||||
)
|
||||
new_node.postinit(value=new_value, format_spec=node.format_spec)
|
||||
return new_node
|
||||
|
||||
|
||||
# TODO: this fix tries to *patch* http://bugs.python.org/issue29051
|
||||
# The problem is that FormattedValue.value, which is a Name node,
|
||||
# has wrong line numbers, usually 1. This creates problems for pylint,
|
||||
# which expects correct line numbers for things such as message control.
|
||||
AstroidManager().register_transform(FormattedValue, _transform_formatted_value)
|
|
@ -1,157 +0,0 @@
|
|||
# Copyright (c) 2016, 2018-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2018 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Alphadelta14 <alpha@alphaservcomputing.solutions>
|
||||
|
||||
"""Astroid hooks for understanding functools library module."""
|
||||
from functools import partial
|
||||
from itertools import chain
|
||||
|
||||
from astroid import BoundMethod, arguments, extract_node, helpers, objects
|
||||
from astroid.exceptions import InferenceError, UseInferenceDefault
|
||||
from astroid.inference_tip import inference_tip
|
||||
from astroid.interpreter import objectmodel
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.nodes.node_classes import AssignName, Attribute, Call, Name
|
||||
from astroid.nodes.scoped_nodes import FunctionDef
|
||||
from astroid.util import Uninferable
|
||||
|
||||
LRU_CACHE = "functools.lru_cache"
|
||||
|
||||
|
||||
class LruWrappedModel(objectmodel.FunctionModel):
|
||||
"""Special attribute model for functions decorated with functools.lru_cache.
|
||||
|
||||
The said decorators patches at decoration time some functions onto
|
||||
the decorated function.
|
||||
"""
|
||||
|
||||
@property
|
||||
def attr___wrapped__(self):
|
||||
return self._instance
|
||||
|
||||
@property
|
||||
def attr_cache_info(self):
|
||||
cache_info = extract_node(
|
||||
"""
|
||||
from functools import _CacheInfo
|
||||
_CacheInfo(0, 0, 0, 0)
|
||||
"""
|
||||
)
|
||||
|
||||
class CacheInfoBoundMethod(BoundMethod):
|
||||
def infer_call_result(self, caller, context=None):
|
||||
yield helpers.safe_infer(cache_info)
|
||||
|
||||
return CacheInfoBoundMethod(proxy=self._instance, bound=self._instance)
|
||||
|
||||
@property
|
||||
def attr_cache_clear(self):
|
||||
node = extract_node("""def cache_clear(self): pass""")
|
||||
return BoundMethod(proxy=node, bound=self._instance.parent.scope())
|
||||
|
||||
|
||||
def _transform_lru_cache(node, context=None) -> None:
|
||||
# TODO: this is not ideal, since the node should be immutable,
|
||||
# but due to https://github.com/PyCQA/astroid/issues/354,
|
||||
# there's not much we can do now.
|
||||
# Replacing the node would work partially, because,
|
||||
# in pylint, the old node would still be available, leading
|
||||
# to spurious false positives.
|
||||
node.special_attributes = LruWrappedModel()(node)
|
||||
|
||||
|
||||
def _functools_partial_inference(node, context=None):
|
||||
call = arguments.CallSite.from_call(node, context=context)
|
||||
number_of_positional = len(call.positional_arguments)
|
||||
if number_of_positional < 1:
|
||||
raise UseInferenceDefault("functools.partial takes at least one argument")
|
||||
if number_of_positional == 1 and not call.keyword_arguments:
|
||||
raise UseInferenceDefault(
|
||||
"functools.partial needs at least to have some filled arguments"
|
||||
)
|
||||
|
||||
partial_function = call.positional_arguments[0]
|
||||
try:
|
||||
inferred_wrapped_function = next(partial_function.infer(context=context))
|
||||
except (InferenceError, StopIteration) as exc:
|
||||
raise UseInferenceDefault from exc
|
||||
if inferred_wrapped_function is Uninferable:
|
||||
raise UseInferenceDefault("Cannot infer the wrapped function")
|
||||
if not isinstance(inferred_wrapped_function, FunctionDef):
|
||||
raise UseInferenceDefault("The wrapped function is not a function")
|
||||
|
||||
# Determine if the passed keywords into the callsite are supported
|
||||
# by the wrapped function.
|
||||
if not inferred_wrapped_function.args:
|
||||
function_parameters = []
|
||||
else:
|
||||
function_parameters = chain(
|
||||
inferred_wrapped_function.args.args or (),
|
||||
inferred_wrapped_function.args.posonlyargs or (),
|
||||
inferred_wrapped_function.args.kwonlyargs or (),
|
||||
)
|
||||
parameter_names = {
|
||||
param.name for param in function_parameters if isinstance(param, AssignName)
|
||||
}
|
||||
if set(call.keyword_arguments) - parameter_names:
|
||||
raise UseInferenceDefault("wrapped function received unknown parameters")
|
||||
|
||||
partial_function = objects.PartialFunction(
|
||||
call,
|
||||
name=inferred_wrapped_function.name,
|
||||
doc=inferred_wrapped_function.doc,
|
||||
lineno=inferred_wrapped_function.lineno,
|
||||
col_offset=inferred_wrapped_function.col_offset,
|
||||
parent=node.parent,
|
||||
)
|
||||
partial_function.postinit(
|
||||
args=inferred_wrapped_function.args,
|
||||
body=inferred_wrapped_function.body,
|
||||
decorators=inferred_wrapped_function.decorators,
|
||||
returns=inferred_wrapped_function.returns,
|
||||
type_comment_returns=inferred_wrapped_function.type_comment_returns,
|
||||
type_comment_args=inferred_wrapped_function.type_comment_args,
|
||||
)
|
||||
return iter((partial_function,))
|
||||
|
||||
|
||||
def _looks_like_lru_cache(node):
|
||||
"""Check if the given function node is decorated with lru_cache."""
|
||||
if not node.decorators:
|
||||
return False
|
||||
for decorator in node.decorators.nodes:
|
||||
if not isinstance(decorator, Call):
|
||||
continue
|
||||
if _looks_like_functools_member(decorator, "lru_cache"):
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _looks_like_functools_member(node, member) -> bool:
|
||||
"""Check if the given Call node is a functools.partial call"""
|
||||
if isinstance(node.func, Name):
|
||||
return node.func.name == member
|
||||
if isinstance(node.func, Attribute):
|
||||
return (
|
||||
node.func.attrname == member
|
||||
and isinstance(node.func.expr, Name)
|
||||
and node.func.expr.name == "functools"
|
||||
)
|
||||
return False
|
||||
|
||||
|
||||
_looks_like_partial = partial(_looks_like_functools_member, member="partial")
|
||||
|
||||
|
||||
AstroidManager().register_transform(
|
||||
FunctionDef, _transform_lru_cache, _looks_like_lru_cache
|
||||
)
|
||||
|
||||
|
||||
AstroidManager().register_transform(
|
||||
Call,
|
||||
inference_tip(_functools_partial_inference),
|
||||
_looks_like_partial,
|
||||
)
|
|
@ -1,262 +0,0 @@
|
|||
# Copyright (c) 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2014 Google, Inc.
|
||||
# Copyright (c) 2014 Cole Robinson <crobinso@redhat.com>
|
||||
# Copyright (c) 2015-2016, 2018, 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2015 David Shea <dshea@redhat.com>
|
||||
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
|
||||
# Copyright (c) 2016 Giuseppe Scrivano <gscrivan@redhat.com>
|
||||
# Copyright (c) 2018 Christoph Reiter <reiter.christoph@gmail.com>
|
||||
# Copyright (c) 2019 Philipp Hörist <philipp@hoerist.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Daniël van Noord <13665637+DanielNoord@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""Astroid hooks for the Python 2 GObject introspection bindings.
|
||||
|
||||
Helps with understanding everything imported from 'gi.repository'
|
||||
"""
|
||||
|
||||
# pylint:disable=import-error,import-outside-toplevel
|
||||
|
||||
import inspect
|
||||
import itertools
|
||||
import re
|
||||
import sys
|
||||
import warnings
|
||||
|
||||
from astroid import nodes
|
||||
from astroid.builder import AstroidBuilder
|
||||
from astroid.exceptions import AstroidBuildingError
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
_inspected_modules = {}
|
||||
|
||||
_identifier_re = r"^[A-Za-z_]\w*$"
|
||||
|
||||
_special_methods = frozenset(
|
||||
{
|
||||
"__lt__",
|
||||
"__le__",
|
||||
"__eq__",
|
||||
"__ne__",
|
||||
"__ge__",
|
||||
"__gt__",
|
||||
"__iter__",
|
||||
"__getitem__",
|
||||
"__setitem__",
|
||||
"__delitem__",
|
||||
"__len__",
|
||||
"__bool__",
|
||||
"__nonzero__",
|
||||
"__next__",
|
||||
"__str__",
|
||||
"__len__",
|
||||
"__contains__",
|
||||
"__enter__",
|
||||
"__exit__",
|
||||
"__repr__",
|
||||
"__getattr__",
|
||||
"__setattr__",
|
||||
"__delattr__",
|
||||
"__del__",
|
||||
"__hash__",
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
def _gi_build_stub(parent):
|
||||
"""
|
||||
Inspect the passed module recursively and build stubs for functions,
|
||||
classes, etc.
|
||||
"""
|
||||
classes = {}
|
||||
functions = {}
|
||||
constants = {}
|
||||
methods = {}
|
||||
for name in dir(parent):
|
||||
if name.startswith("__") and name not in _special_methods:
|
||||
continue
|
||||
|
||||
# Check if this is a valid name in python
|
||||
if not re.match(_identifier_re, name):
|
||||
continue
|
||||
|
||||
try:
|
||||
obj = getattr(parent, name)
|
||||
except AttributeError:
|
||||
continue
|
||||
|
||||
if inspect.isclass(obj):
|
||||
classes[name] = obj
|
||||
elif inspect.isfunction(obj) or inspect.isbuiltin(obj):
|
||||
functions[name] = obj
|
||||
elif inspect.ismethod(obj) or inspect.ismethoddescriptor(obj):
|
||||
methods[name] = obj
|
||||
elif (
|
||||
str(obj).startswith("<flags")
|
||||
or str(obj).startswith("<enum ")
|
||||
or str(obj).startswith("<GType ")
|
||||
or inspect.isdatadescriptor(obj)
|
||||
):
|
||||
constants[name] = 0
|
||||
elif isinstance(obj, (int, str)):
|
||||
constants[name] = obj
|
||||
elif callable(obj):
|
||||
# Fall back to a function for anything callable
|
||||
functions[name] = obj
|
||||
else:
|
||||
# Assume everything else is some manner of constant
|
||||
constants[name] = 0
|
||||
|
||||
ret = ""
|
||||
|
||||
if constants:
|
||||
ret += f"# {parent.__name__} constants\n\n"
|
||||
for name in sorted(constants):
|
||||
if name[0].isdigit():
|
||||
# GDK has some busted constant names like
|
||||
# Gdk.EventType.2BUTTON_PRESS
|
||||
continue
|
||||
|
||||
val = constants[name]
|
||||
|
||||
strval = str(val)
|
||||
if isinstance(val, str):
|
||||
strval = '"%s"' % str(val).replace("\\", "\\\\")
|
||||
ret += f"{name} = {strval}\n"
|
||||
|
||||
if ret:
|
||||
ret += "\n\n"
|
||||
if functions:
|
||||
ret += f"# {parent.__name__} functions\n\n"
|
||||
for name in sorted(functions):
|
||||
ret += f"def {name}(*args, **kwargs):\n"
|
||||
ret += " pass\n"
|
||||
|
||||
if ret:
|
||||
ret += "\n\n"
|
||||
if methods:
|
||||
ret += f"# {parent.__name__} methods\n\n"
|
||||
for name in sorted(methods):
|
||||
ret += f"def {name}(self, *args, **kwargs):\n"
|
||||
ret += " pass\n"
|
||||
|
||||
if ret:
|
||||
ret += "\n\n"
|
||||
if classes:
|
||||
ret += f"# {parent.__name__} classes\n\n"
|
||||
for name, obj in sorted(classes.items()):
|
||||
base = "object"
|
||||
if issubclass(obj, Exception):
|
||||
base = "Exception"
|
||||
ret += f"class {name}({base}):\n"
|
||||
|
||||
classret = _gi_build_stub(obj)
|
||||
if not classret:
|
||||
classret = "pass\n"
|
||||
|
||||
for line in classret.splitlines():
|
||||
ret += " " + line + "\n"
|
||||
ret += "\n"
|
||||
|
||||
return ret
|
||||
|
||||
|
||||
def _import_gi_module(modname):
|
||||
# we only consider gi.repository submodules
|
||||
if not modname.startswith("gi.repository."):
|
||||
raise AstroidBuildingError(modname=modname)
|
||||
# build astroid representation unless we already tried so
|
||||
if modname not in _inspected_modules:
|
||||
modnames = [modname]
|
||||
optional_modnames = []
|
||||
|
||||
# GLib and GObject may have some special case handling
|
||||
# in pygobject that we need to cope with. However at
|
||||
# least as of pygobject3-3.13.91 the _glib module doesn't
|
||||
# exist anymore, so if treat these modules as optional.
|
||||
if modname == "gi.repository.GLib":
|
||||
optional_modnames.append("gi._glib")
|
||||
elif modname == "gi.repository.GObject":
|
||||
optional_modnames.append("gi._gobject")
|
||||
|
||||
try:
|
||||
modcode = ""
|
||||
for m in itertools.chain(modnames, optional_modnames):
|
||||
try:
|
||||
with warnings.catch_warnings():
|
||||
# Just inspecting the code can raise gi deprecation
|
||||
# warnings, so ignore them.
|
||||
try:
|
||||
from gi import ( # pylint:disable=import-error
|
||||
PyGIDeprecationWarning,
|
||||
PyGIWarning,
|
||||
)
|
||||
|
||||
warnings.simplefilter("ignore", PyGIDeprecationWarning)
|
||||
warnings.simplefilter("ignore", PyGIWarning)
|
||||
except Exception: # pylint:disable=broad-except
|
||||
pass
|
||||
|
||||
__import__(m)
|
||||
modcode += _gi_build_stub(sys.modules[m])
|
||||
except ImportError:
|
||||
if m not in optional_modnames:
|
||||
raise
|
||||
except ImportError:
|
||||
astng = _inspected_modules[modname] = None
|
||||
else:
|
||||
astng = AstroidBuilder(AstroidManager()).string_build(modcode, modname)
|
||||
_inspected_modules[modname] = astng
|
||||
else:
|
||||
astng = _inspected_modules[modname]
|
||||
if astng is None:
|
||||
raise AstroidBuildingError(modname=modname)
|
||||
return astng
|
||||
|
||||
|
||||
def _looks_like_require_version(node):
|
||||
# Return whether this looks like a call to gi.require_version(<name>, <version>)
|
||||
# Only accept function calls with two constant arguments
|
||||
if len(node.args) != 2:
|
||||
return False
|
||||
|
||||
if not all(isinstance(arg, nodes.Const) for arg in node.args):
|
||||
return False
|
||||
|
||||
func = node.func
|
||||
if isinstance(func, nodes.Attribute):
|
||||
if func.attrname != "require_version":
|
||||
return False
|
||||
if isinstance(func.expr, nodes.Name) and func.expr.name == "gi":
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
if isinstance(func, nodes.Name):
|
||||
return func.name == "require_version"
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def _register_require_version(node):
|
||||
# Load the gi.require_version locally
|
||||
try:
|
||||
import gi
|
||||
|
||||
gi.require_version(node.args[0].value, node.args[1].value)
|
||||
except Exception: # pylint:disable=broad-except
|
||||
pass
|
||||
|
||||
return node
|
||||
|
||||
|
||||
AstroidManager().register_failed_import_hook(_import_gi_module)
|
||||
AstroidManager().register_transform(
|
||||
nodes.Call, _register_require_version, _looks_like_require_version
|
||||
)
|
|
@ -1,64 +0,0 @@
|
|||
# Copyright (c) 2016, 2018, 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2018 David Poirier <david-poirier-csn@users.noreply.github.com>
|
||||
# Copyright (c) 2018 wgehalo <wgehalo@gmail.com>
|
||||
# Copyright (c) 2018 Ioana Tagirta <ioana.tagirta@gmail.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2020 David Gilman <davidgilman1@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import parse
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def _hashlib_transform():
|
||||
signature = "value=''"
|
||||
template = """
|
||||
class %(name)s(object):
|
||||
def __init__(self, %(signature)s): pass
|
||||
def digest(self):
|
||||
return %(digest)s
|
||||
def copy(self):
|
||||
return self
|
||||
def update(self, value): pass
|
||||
def hexdigest(self):
|
||||
return ''
|
||||
@property
|
||||
def name(self):
|
||||
return %(name)r
|
||||
@property
|
||||
def block_size(self):
|
||||
return 1
|
||||
@property
|
||||
def digest_size(self):
|
||||
return 1
|
||||
"""
|
||||
algorithms_with_signature = dict.fromkeys(
|
||||
["md5", "sha1", "sha224", "sha256", "sha384", "sha512"], signature
|
||||
)
|
||||
blake2b_signature = "data=b'', *, digest_size=64, key=b'', salt=b'', \
|
||||
person=b'', fanout=1, depth=1, leaf_size=0, node_offset=0, \
|
||||
node_depth=0, inner_size=0, last_node=False"
|
||||
blake2s_signature = "data=b'', *, digest_size=32, key=b'', salt=b'', \
|
||||
person=b'', fanout=1, depth=1, leaf_size=0, node_offset=0, \
|
||||
node_depth=0, inner_size=0, last_node=False"
|
||||
new_algorithms = dict.fromkeys(
|
||||
["sha3_224", "sha3_256", "sha3_384", "sha3_512", "shake_128", "shake_256"],
|
||||
signature,
|
||||
)
|
||||
algorithms_with_signature.update(new_algorithms)
|
||||
algorithms_with_signature.update(
|
||||
{"blake2b": blake2b_signature, "blake2s": blake2s_signature}
|
||||
)
|
||||
classes = "".join(
|
||||
template % {"name": hashfunc, "digest": 'b""', "signature": signature}
|
||||
for hashfunc, signature in algorithms_with_signature.items()
|
||||
)
|
||||
return parse(classes)
|
||||
|
||||
|
||||
register_module_extender(AstroidManager(), "hashlib", _hashlib_transform)
|
|
@ -1,215 +0,0 @@
|
|||
# Copyright (c) 2019-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""Astroid brain hints for some of the `http` module."""
|
||||
import textwrap
|
||||
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def _http_transform():
    """Build a stub ``http`` module exposing a statically inferable HTTPStatus.

    The real ``http.HTTPStatus`` is an enum built dynamically; this stub
    declares every member as a ``_HTTPStatus`` namedtuple with ``value``,
    ``phrase`` and ``description`` fields so attribute access infers.

    :returns: the parsed stub module (an astroid Module node).
    """
    code = textwrap.dedent(
        """
        from collections import namedtuple
        _HTTPStatus = namedtuple('_HTTPStatus', 'value phrase description')

        class HTTPStatus:

            @property
            def phrase(self):
                return ""
            @property
            def value(self):
                return 0
            @property
            def description(self):
                return ""

            # informational
            CONTINUE = _HTTPStatus(100, 'Continue', 'Request received, please continue')
            SWITCHING_PROTOCOLS = _HTTPStatus(101, 'Switching Protocols',
                'Switching to new protocol; obey Upgrade header')
            PROCESSING = _HTTPStatus(102, 'Processing', '')
            OK = _HTTPStatus(200, 'OK', 'Request fulfilled, document follows')
            CREATED = _HTTPStatus(201, 'Created', 'Document created, URL follows')
            ACCEPTED = _HTTPStatus(202, 'Accepted',
                'Request accepted, processing continues off-line')
            NON_AUTHORITATIVE_INFORMATION = _HTTPStatus(203,
                'Non-Authoritative Information', 'Request fulfilled from cache')
            NO_CONTENT = _HTTPStatus(204, 'No Content', 'Request fulfilled, nothing follows')
            RESET_CONTENT = _HTTPStatus(205, 'Reset Content', 'Clear input form for further input')
            PARTIAL_CONTENT = _HTTPStatus(206, 'Partial Content', 'Partial content follows')
            MULTI_STATUS = _HTTPStatus(207, 'Multi-Status', '')
            ALREADY_REPORTED = _HTTPStatus(208, 'Already Reported', '')
            IM_USED = _HTTPStatus(226, 'IM Used', '')
            MULTIPLE_CHOICES = _HTTPStatus(300, 'Multiple Choices',
                'Object has several resources -- see URI list')
            MOVED_PERMANENTLY = _HTTPStatus(301, 'Moved Permanently',
                'Object moved permanently -- see URI list')
            FOUND = _HTTPStatus(302, 'Found', 'Object moved temporarily -- see URI list')
            SEE_OTHER = _HTTPStatus(303, 'See Other', 'Object moved -- see Method and URL list')
            NOT_MODIFIED = _HTTPStatus(304, 'Not Modified',
                'Document has not changed since given time')
            USE_PROXY = _HTTPStatus(305, 'Use Proxy',
                'You must use proxy specified in Location to access this resource')
            TEMPORARY_REDIRECT = _HTTPStatus(307, 'Temporary Redirect',
                'Object moved temporarily -- see URI list')
            PERMANENT_REDIRECT = _HTTPStatus(308, 'Permanent Redirect',
                'Object moved permanently -- see URI list')
            BAD_REQUEST = _HTTPStatus(400, 'Bad Request',
                'Bad request syntax or unsupported method')
            UNAUTHORIZED = _HTTPStatus(401, 'Unauthorized',
                'No permission -- see authorization schemes')
            PAYMENT_REQUIRED = _HTTPStatus(402, 'Payment Required',
                'No payment -- see charging schemes')
            FORBIDDEN = _HTTPStatus(403, 'Forbidden',
                'Request forbidden -- authorization will not help')
            NOT_FOUND = _HTTPStatus(404, 'Not Found',
                'Nothing matches the given URI')
            METHOD_NOT_ALLOWED = _HTTPStatus(405, 'Method Not Allowed',
                'Specified method is invalid for this resource')
            NOT_ACCEPTABLE = _HTTPStatus(406, 'Not Acceptable',
                'URI not available in preferred format')
            PROXY_AUTHENTICATION_REQUIRED = _HTTPStatus(407,
                'Proxy Authentication Required',
                'You must authenticate with this proxy before proceeding')
            REQUEST_TIMEOUT = _HTTPStatus(408, 'Request Timeout',
                'Request timed out; try again later')
            CONFLICT = _HTTPStatus(409, 'Conflict', 'Request conflict')
            GONE = _HTTPStatus(410, 'Gone',
                'URI no longer exists and has been permanently removed')
            LENGTH_REQUIRED = _HTTPStatus(411, 'Length Required',
                'Client must specify Content-Length')
            PRECONDITION_FAILED = _HTTPStatus(412, 'Precondition Failed',
                'Precondition in headers is false')
            REQUEST_ENTITY_TOO_LARGE = _HTTPStatus(413, 'Request Entity Too Large',
                'Entity is too large')
            REQUEST_URI_TOO_LONG = _HTTPStatus(414, 'Request-URI Too Long',
                'URI is too long')
            UNSUPPORTED_MEDIA_TYPE = _HTTPStatus(415, 'Unsupported Media Type',
                'Entity body in unsupported format')
            REQUESTED_RANGE_NOT_SATISFIABLE = _HTTPStatus(416,
                'Requested Range Not Satisfiable',
                'Cannot satisfy request range')
            EXPECTATION_FAILED = _HTTPStatus(417, 'Expectation Failed',
                'Expect condition could not be satisfied')
            MISDIRECTED_REQUEST = _HTTPStatus(421, 'Misdirected Request',
                'Server is not able to produce a response')
            # BUGFIX: the following members previously omitted the third
            # `description` field of the 3-field namedtuple, so inferring
            # `.description` on them failed. Use '' like PROCESSING et al.
            UNPROCESSABLE_ENTITY = _HTTPStatus(422, 'Unprocessable Entity', '')
            LOCKED = _HTTPStatus(423, 'Locked', '')
            FAILED_DEPENDENCY = _HTTPStatus(424, 'Failed Dependency', '')
            UPGRADE_REQUIRED = _HTTPStatus(426, 'Upgrade Required', '')
            PRECONDITION_REQUIRED = _HTTPStatus(428, 'Precondition Required',
                'The origin server requires the request to be conditional')
            TOO_MANY_REQUESTS = _HTTPStatus(429, 'Too Many Requests',
                'The user has sent too many requests in '
                'a given amount of time ("rate limiting")')
            REQUEST_HEADER_FIELDS_TOO_LARGE = _HTTPStatus(431,
                'Request Header Fields Too Large',
                'The server is unwilling to process the request because its header '
                'fields are too large')
            UNAVAILABLE_FOR_LEGAL_REASONS = _HTTPStatus(451,
                'Unavailable For Legal Reasons',
                'The server is denying access to the '
                'resource as a consequence of a legal demand')
            INTERNAL_SERVER_ERROR = _HTTPStatus(500, 'Internal Server Error',
                'Server got itself in trouble')
            NOT_IMPLEMENTED = _HTTPStatus(501, 'Not Implemented',
                'Server does not support this operation')
            BAD_GATEWAY = _HTTPStatus(502, 'Bad Gateway',
                'Invalid responses from another server/proxy')
            SERVICE_UNAVAILABLE = _HTTPStatus(503, 'Service Unavailable',
                'The server cannot process the request due to a high load')
            GATEWAY_TIMEOUT = _HTTPStatus(504, 'Gateway Timeout',
                'The gateway server did not receive a timely response')
            HTTP_VERSION_NOT_SUPPORTED = _HTTPStatus(505, 'HTTP Version Not Supported',
                'Cannot fulfill request')
            # BUGFIX: same missing-description fix as above.
            VARIANT_ALSO_NEGOTIATES = _HTTPStatus(506, 'Variant Also Negotiates', '')
            INSUFFICIENT_STORAGE = _HTTPStatus(507, 'Insufficient Storage', '')
            LOOP_DETECTED = _HTTPStatus(508, 'Loop Detected', '')
            NOT_EXTENDED = _HTTPStatus(510, 'Not Extended', '')
            NETWORK_AUTHENTICATION_REQUIRED = _HTTPStatus(511,
                'Network Authentication Required',
                'The client needs to authenticate to gain network access')
        """
    )
    return AstroidBuilder(AstroidManager()).string_build(code)
|
||||
|
||||
|
||||
def _http_client_transform():
    """Build a stub ``http.client`` module for inference.

    Each module-level status constant simply aliases the corresponding
    member of ``http.HTTPStatus`` so that ``http.client.<CONST>`` resolves.
    """
    return AstroidBuilder(AstroidManager()).string_build(
        textwrap.dedent(
            """
            from http import HTTPStatus

            CONTINUE = HTTPStatus.CONTINUE
            SWITCHING_PROTOCOLS = HTTPStatus.SWITCHING_PROTOCOLS
            PROCESSING = HTTPStatus.PROCESSING
            OK = HTTPStatus.OK
            CREATED = HTTPStatus.CREATED
            ACCEPTED = HTTPStatus.ACCEPTED
            NON_AUTHORITATIVE_INFORMATION = HTTPStatus.NON_AUTHORITATIVE_INFORMATION
            NO_CONTENT = HTTPStatus.NO_CONTENT
            RESET_CONTENT = HTTPStatus.RESET_CONTENT
            PARTIAL_CONTENT = HTTPStatus.PARTIAL_CONTENT
            MULTI_STATUS = HTTPStatus.MULTI_STATUS
            ALREADY_REPORTED = HTTPStatus.ALREADY_REPORTED
            IM_USED = HTTPStatus.IM_USED
            MULTIPLE_CHOICES = HTTPStatus.MULTIPLE_CHOICES
            MOVED_PERMANENTLY = HTTPStatus.MOVED_PERMANENTLY
            FOUND = HTTPStatus.FOUND
            SEE_OTHER = HTTPStatus.SEE_OTHER
            NOT_MODIFIED = HTTPStatus.NOT_MODIFIED
            USE_PROXY = HTTPStatus.USE_PROXY
            TEMPORARY_REDIRECT = HTTPStatus.TEMPORARY_REDIRECT
            PERMANENT_REDIRECT = HTTPStatus.PERMANENT_REDIRECT
            BAD_REQUEST = HTTPStatus.BAD_REQUEST
            UNAUTHORIZED = HTTPStatus.UNAUTHORIZED
            PAYMENT_REQUIRED = HTTPStatus.PAYMENT_REQUIRED
            FORBIDDEN = HTTPStatus.FORBIDDEN
            NOT_FOUND = HTTPStatus.NOT_FOUND
            METHOD_NOT_ALLOWED = HTTPStatus.METHOD_NOT_ALLOWED
            NOT_ACCEPTABLE = HTTPStatus.NOT_ACCEPTABLE
            PROXY_AUTHENTICATION_REQUIRED = HTTPStatus.PROXY_AUTHENTICATION_REQUIRED
            REQUEST_TIMEOUT = HTTPStatus.REQUEST_TIMEOUT
            CONFLICT = HTTPStatus.CONFLICT
            GONE = HTTPStatus.GONE
            LENGTH_REQUIRED = HTTPStatus.LENGTH_REQUIRED
            PRECONDITION_FAILED = HTTPStatus.PRECONDITION_FAILED
            REQUEST_ENTITY_TOO_LARGE = HTTPStatus.REQUEST_ENTITY_TOO_LARGE
            REQUEST_URI_TOO_LONG = HTTPStatus.REQUEST_URI_TOO_LONG
            UNSUPPORTED_MEDIA_TYPE = HTTPStatus.UNSUPPORTED_MEDIA_TYPE
            REQUESTED_RANGE_NOT_SATISFIABLE = HTTPStatus.REQUESTED_RANGE_NOT_SATISFIABLE
            EXPECTATION_FAILED = HTTPStatus.EXPECTATION_FAILED
            UNPROCESSABLE_ENTITY = HTTPStatus.UNPROCESSABLE_ENTITY
            LOCKED = HTTPStatus.LOCKED
            FAILED_DEPENDENCY = HTTPStatus.FAILED_DEPENDENCY
            UPGRADE_REQUIRED = HTTPStatus.UPGRADE_REQUIRED
            PRECONDITION_REQUIRED = HTTPStatus.PRECONDITION_REQUIRED
            TOO_MANY_REQUESTS = HTTPStatus.TOO_MANY_REQUESTS
            REQUEST_HEADER_FIELDS_TOO_LARGE = HTTPStatus.REQUEST_HEADER_FIELDS_TOO_LARGE
            INTERNAL_SERVER_ERROR = HTTPStatus.INTERNAL_SERVER_ERROR
            NOT_IMPLEMENTED = HTTPStatus.NOT_IMPLEMENTED
            BAD_GATEWAY = HTTPStatus.BAD_GATEWAY
            SERVICE_UNAVAILABLE = HTTPStatus.SERVICE_UNAVAILABLE
            GATEWAY_TIMEOUT = HTTPStatus.GATEWAY_TIMEOUT
            HTTP_VERSION_NOT_SUPPORTED = HTTPStatus.HTTP_VERSION_NOT_SUPPORTED
            VARIANT_ALSO_NEGOTIATES = HTTPStatus.VARIANT_ALSO_NEGOTIATES
            INSUFFICIENT_STORAGE = HTTPStatus.INSUFFICIENT_STORAGE
            LOOP_DETECTED = HTTPStatus.LOOP_DETECTED
            NOT_EXTENDED = HTTPStatus.NOT_EXTENDED
            NETWORK_AUTHENTICATION_REQUIRED = HTTPStatus.NETWORK_AUTHENTICATION_REQUIRED
            """
        )
    )
|
||||
|
||||
|
||||
# Extend the real http / http.client modules with the stubs above.
register_module_extender(AstroidManager(), "http", _http_transform)
register_module_extender(AstroidManager(), "http.client", _http_client_transform)
|
|
@ -1,53 +0,0 @@
|
|||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
"""
|
||||
Astroid hook for the Hypothesis library.
|
||||
|
||||
Without this hook pylint reports no-value-for-parameter for use of strategies
|
||||
defined using the `@hypothesis.strategies.composite` decorator. For example:
|
||||
|
||||
from hypothesis import strategies as st
|
||||
|
||||
@st.composite
|
||||
def a_strategy(draw):
|
||||
return draw(st.integers())
|
||||
|
||||
a_strategy()
|
||||
|
||||
"""
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.nodes.scoped_nodes import FunctionDef
|
||||
|
||||
# Spellings (bare, partially- and fully-qualified) under which the
# hypothesis `composite` decorator may appear in user code.
COMPOSITE_NAMES = (
    "composite",
    "st.composite",
    "strategies.composite",
    "hypothesis.strategies.composite",
)
|
||||
|
||||
|
||||
def is_decorated_with_st_composite(node):
    """Return True if a decorated node has @st.composite applied."""
    if not node.decorators:
        return False
    positional = node.args.args
    if not positional or positional[0].name != "draw":
        # A composite strategy always takes `draw` as its first parameter.
        return False
    return any(
        decorator.as_string() in COMPOSITE_NAMES
        for decorator in node.decorators.nodes
    )
|
||||
|
||||
|
||||
def remove_draw_parameter_from_composite_strategy(node):
    """Strip the leading ``draw`` parameter from an @st.composite strategy.

    Hypothesis always supplies ``draw`` itself, so removing it prevents a
    spurious no-value-for-parameter lint for callers of the strategy.
    """
    arguments = node.args
    # The per-argument metadata lives in parallel sequences; drop the first
    # entry from each so they stay aligned.
    for parallel_seq in (
        arguments.args,
        arguments.annotations,
        arguments.type_comment_args,
    ):
        del parallel_seq[0]
    return node
|
||||
|
||||
|
||||
# Rewrite @st.composite FunctionDefs so call sites are checked against the
# signature without the Hypothesis-supplied `draw` parameter.
AstroidManager().register_transform(
    node_class=FunctionDef,
    transform=remove_draw_parameter_from_composite_strategy,
    predicate=is_decorated_with_st_composite,
)
|
|
@ -1,45 +0,0 @@
|
|||
# Copyright (c) 2016, 2018, 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""Astroid brain hints for some of the _io C objects."""
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.nodes import ClassDef
|
||||
|
||||
# _io class names that receive a synthetic `raw` attribute.
BUFFERED = {"BufferedWriter", "BufferedReader"}
# Class-name constants used by the transforms below.
TextIOWrapper = "TextIOWrapper"
FileIO = "FileIO"
BufferedWriter = "BufferedWriter"
|
||||
|
||||
def _generic_io_transform(node, name, cls):
    """Transform the given name, by adding the given *class* as a member of the node."""
    # Instantiate `cls` from the `_io` module and expose it as `name`.
    io_module = AstroidManager().ast_from_module_name("_io")
    node.locals[name] = [io_module[cls].instantiate_class()]
|
||||
|
||||
|
||||
def _transform_text_io_wrapper(node):
    """Add a synthetic ``buffer`` attribute to the ``TextIOWrapper`` class."""
    # This is not always correct, since it can vary with the type of the descriptor,
    # being stdout, stderr or stdin. But we cannot get access to the name of the
    # stream, which is why we are using the BufferedWriter class as a default
    # value
    return _generic_io_transform(node, name="buffer", cls=BufferedWriter)
|
||||
|
||||
|
||||
def _transform_buffered(node):
    """Add a synthetic ``raw`` attribute (a ``FileIO`` instance) to buffered classes."""
    return _generic_io_transform(node, name="raw", cls=FileIO)
|
||||
|
||||
|
||||
# Attach `raw` to BufferedReader/BufferedWriter and `buffer` to TextIOWrapper.
AstroidManager().register_transform(
    ClassDef, _transform_buffered, lambda node: node.name in BUFFERED
)
AstroidManager().register_transform(
    ClassDef, _transform_text_io_wrapper, lambda node: node.name == TextIOWrapper
)
|
|
@ -1,91 +0,0 @@
|
|||
# Copyright (c) 2012-2013 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2014 Google, Inc.
|
||||
# Copyright (c) 2015-2016, 2018, 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2020 Peter Kolbus <peter.kolbus@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def mechanize_transform():
    """Build a static stub of ``mechanize.Browser`` for inference.

    The real Browser relies on ``__getattr__`` dispatch, which astroid cannot
    follow; this stub declares the public methods explicitly.

    :returns: the parsed stub module (an astroid Module node).
    """
    # BUGFIX: the original stub defined `open_local_file` twice; the
    # duplicate definition has been removed.
    return AstroidBuilder(AstroidManager()).string_build(
        """

class Browser(object):
    def __getattr__(self, name):
        return None
    def __getitem__(self, name):
        return None
    def __setitem__(self, name, val):
        return None
    def back(self, n=1):
        return None
    def clear_history(self):
        return None
    def click(self, *args, **kwds):
        return None
    def click_link(self, link=None, **kwds):
        return None
    def close(self):
        return None
    def encoding(self):
        return None
    def find_link(self, text=None, text_regex=None, name=None, name_regex=None, url=None, url_regex=None, tag=None, predicate=None, nr=0):
        return None
    def follow_link(self, link=None, **kwds):
        return None
    def forms(self):
        return None
    def geturl(self):
        return None
    def global_form(self):
        return None
    def links(self, **kwds):
        return None
    def open_local_file(self, filename):
        return None
    def open(self, url, data=None, timeout=None):
        return None
    def open_novisit(self, url, data=None, timeout=None):
        return None
    def reload(self):
        return None
    def response(self):
        return None
    def select_form(self, name=None, predicate=None, nr=None, **attrs):
        return None
    def set_cookie(self, cookie_string):
        return None
    def set_handle_referer(self, handle):
        return None
    def set_header(self, header, value=None):
        return None
    def set_html(self, html, url="http://example.com/"):
        return None
    def set_response(self, response):
        return None
    def set_simple_cookie(self, name, value, domain, path='/'):
        return None
    def submit(self, *args, **kwds):
        return None
    def title(self):
        return None
    def viewing_html(self):
        return None
    def visit_response(self, response, request=None):
        return None
"""
    )
|
||||
|
||||
|
||||
# Replace the highly dynamic mechanize.Browser API with the static stub above.
register_module_extender(AstroidManager(), "mechanize", mechanize_transform)
|
|
@ -1,112 +0,0 @@
|
|||
# Copyright (c) 2016, 2018, 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2020 David Gilman <davidgilman1@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
from astroid.bases import BoundMethod
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import parse
|
||||
from astroid.exceptions import InferenceError
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.nodes.scoped_nodes import FunctionDef
|
||||
|
||||
|
||||
def _multiprocessing_transform():
    """Build a stub ``multiprocessing`` module for inference.

    ``Manager`` is stubbed to return a ``SyncManager``, and the public
    attributes of the default/base contexts are copied onto the module so
    that multiprocessing's getattr-based dispatch can be inferred.
    """
    module = parse(
        """
    from multiprocessing.managers import SyncManager
    def Manager():
        return SyncManager()
    """
    )
    # Multiprocessing uses a getattr lookup inside contexts,
    # in order to get the attributes they need. Since it's extremely
    # dynamic, we use this approach to fake it.
    helper = parse(
        """
    from multiprocessing.context import DefaultContext, BaseContext
    default = DefaultContext()
    base = BaseContext()
    """
    )
    try:
        inferred_contexts = [
            next(helper["default"].infer()),
            next(helper["base"].infer()),
        ]
    except (InferenceError, StopIteration):
        # Without the contexts we can still provide the Manager stub.
        return module

    for context_node in inferred_contexts:
        for attr_name, assigned in context_node.locals.items():
            if attr_name.startswith("_"):
                # Private context attributes are not part of the public API.
                continue

            target = assigned[0]
            if isinstance(target, FunctionDef):
                # We need to rebound this, since otherwise
                # it will have an extra argument (self).
                target = BoundMethod(target, context_node)
            module[attr_name] = target
    return module
|
||||
|
||||
|
||||
def _multiprocessing_managers_transform():
    """Build a stub ``multiprocessing.managers`` module for inference.

    ``SyncManager`` proxies are faked with the plain objects they manage
    (queues, locks, lists, dicts, ...), which is close enough for inference.
    """
    return parse(
        """
    import array
    import threading
    import multiprocessing.pool as pool
    import queue

    class Namespace(object):
        pass

    class Value(object):
        def __init__(self, typecode, value, lock=True):
            self._typecode = typecode
            self._value = value
        def get(self):
            return self._value
        def set(self, value):
            self._value = value
        def __repr__(self):
            return '%s(%r, %r)'%(type(self).__name__, self._typecode, self._value)
        value = property(get, set)

    def Array(typecode, sequence, lock=True):
        return array.array(typecode, sequence)

    class SyncManager(object):
        Queue = JoinableQueue = queue.Queue
        Event = threading.Event
        RLock = threading.RLock
        BoundedSemaphore = threading.BoundedSemaphore
        Condition = threading.Condition
        Barrier = threading.Barrier
        Pool = pool.Pool
        list = list
        dict = dict
        Value = Value
        Array = Array
        Namespace = Namespace
        __enter__ = lambda self: self
        __exit__ = lambda *args: args

        def start(self, initializer=None, initargs=None):
            pass
        def shutdown(self):
            pass
    """
    )
|
||||
|
||||
|
||||
# Extend the real multiprocessing modules with the stubs above on first load.
register_module_extender(
    AstroidManager(), "multiprocessing.managers", _multiprocessing_managers_transform
)
register_module_extender(
    AstroidManager(), "multiprocessing", _multiprocessing_transform
)
|
|
@ -1,576 +0,0 @@
|
|||
# Copyright (c) 2012-2015 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2013-2014 Google, Inc.
|
||||
# Copyright (c) 2014-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2015 Dmitry Pribysh <dmand@yandex.ru>
|
||||
# Copyright (c) 2015 David Shea <dshea@redhat.com>
|
||||
# Copyright (c) 2015 Philip Lorenz <philip@bithub.de>
|
||||
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
|
||||
# Copyright (c) 2016 Mateusz Bysiek <mb@mbdev.pl>
|
||||
# Copyright (c) 2017 Hugo <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2019 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
# Copyright (c) 2020 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2020 Ram Rachum <ram@rachum.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Daniël van Noord <13665637+DanielNoord@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Dimitri Prybysh <dmand@yandex.ru>
|
||||
# Copyright (c) 2021 David Liu <david@cs.toronto.edu>
|
||||
# Copyright (c) 2021 pre-commit-ci[bot] <bot@noreply.github.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Andrew Haigh <hello@nelf.in>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""Astroid hooks for the Python standard library."""
|
||||
|
||||
import functools
|
||||
import keyword
|
||||
from textwrap import dedent
|
||||
|
||||
import astroid
|
||||
from astroid import arguments, inference_tip, nodes, util
|
||||
from astroid.builder import AstroidBuilder, extract_node
|
||||
from astroid.exceptions import (
|
||||
AstroidTypeError,
|
||||
AstroidValueError,
|
||||
InferenceError,
|
||||
MroError,
|
||||
UseInferenceDefault,
|
||||
)
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
# Base-class spellings that identify a typing.NamedTuple subclass.
TYPING_NAMEDTUPLE_BASENAMES = {"NamedTuple", "typing.NamedTuple"}
# Base-class spellings that identify an enum.Enum-style class.
ENUM_BASE_NAMES = {
    "Enum",
    "IntEnum",
    "enum.Enum",
    "enum.IntEnum",
    "IntFlag",
    "enum.IntFlag",
}
|
||||
|
||||
|
||||
def _infer_first(node, context):
    """Return the first inferred value of *node*.

    Raises UseInferenceDefault when nothing (useful) can be inferred, and
    InferenceError when inference yields no result at all.
    """
    if node is not util.Uninferable:
        try:
            inferred = next(node.infer(context=context))
        except StopIteration as exc:
            raise InferenceError from exc
        if inferred is not util.Uninferable:
            return inferred
    # Either the node itself or its inferred value is Uninferable.
    raise UseInferenceDefault()
|
||||
|
||||
|
||||
def _find_func_form_arguments(node, context):
    """Extract the (typename, field_names) arguments of a namedtuple/enum call.

    Looks at positional arguments first, then at the corresponding keyword
    arguments. Raises UseInferenceDefault if either cannot be determined.
    """

    def _extract_namedtuple_arg_or_keyword(  # pylint: disable=inconsistent-return-statements
        position, key_name=None
    ):
        if len(args) > position:
            return _infer_first(args[position], context)
        if key_name and key_name in found_keywords:
            return _infer_first(found_keywords[key_name], context)

    args = node.args
    keywords = node.keywords
    # BUGFIX(readability): use `kw` as the comprehension variable instead of
    # `keyword`, which shadowed the imported `keyword` module.
    found_keywords = {kw.arg: kw.value for kw in keywords} if keywords else {}

    name = _extract_namedtuple_arg_or_keyword(position=0, key_name="typename")
    names = _extract_namedtuple_arg_or_keyword(position=1, key_name="field_names")
    if name and names:
        return name.value, names

    raise UseInferenceDefault()
|
||||
|
||||
|
||||
def infer_func_form(node, base_type, context=None, enum=False):
    """Specific inference function for namedtuple or Python 3 enum.

    :param node: the Call node being inferred.
    :param base_type: node to append to the synthesized class' bases.
    :param context: inference context, if any.
    :param enum: True when inferring an Enum call rather than a namedtuple.
    :returns: a (class_node, name, attributes) triple.
    :raises UseInferenceDefault: when the call's arguments cannot be inferred.
    """
    # node is a Call node, class name as first argument and generated class
    # attributes as second argument

    # namedtuple or enums list of attributes can be a list of strings or a
    # whitespace-separate string
    try:
        name, names = _find_func_form_arguments(node, context)
        try:
            attributes = names.value.replace(",", " ").split()
        except AttributeError as exc:
            # `names` is not a string constant; fall back to container forms.
            if not enum:
                attributes = [
                    _infer_first(const, context).value for const in names.elts
                ]
            else:
                # Enums supports either iterator of (name, value) pairs
                # or mappings.
                if hasattr(names, "items") and isinstance(names.items, list):
                    attributes = [
                        _infer_first(const[0], context).value
                        for const in names.items
                        if isinstance(const[0], nodes.Const)
                    ]
                elif hasattr(names, "elts"):
                    # Enums can support either ["a", "b", "c"]
                    # or [("a", 1), ("b", 2), ...], but they can't
                    # be mixed.
                    if all(isinstance(const, nodes.Tuple) for const in names.elts):
                        attributes = [
                            _infer_first(const.elts[0], context).value
                            for const in names.elts
                            if isinstance(const, nodes.Tuple)
                        ]
                    else:
                        attributes = [
                            _infer_first(const, context).value for const in names.elts
                        ]
                else:
                    raise AttributeError from exc
            if not attributes:
                raise AttributeError from exc
    except (AttributeError, InferenceError) as exc:
        raise UseInferenceDefault from exc

    if not enum:
        # namedtuple maps sys.intern(str()) over over field_names
        attributes = [str(attr) for attr in attributes]
        # XXX this should succeed *unless* __str__/__repr__ is incorrect or throws
        # in which case we should not have inferred these values and raised earlier
        attributes = [attr for attr in attributes if " " not in attr]

    # If we can't infer the name of the class, don't crash, up to this point
    # we know it is a namedtuple anyway.
    name = name or "Uninferable"
    # we want to return a Class node instance with proper attributes set
    class_node = nodes.ClassDef(name, "docstring")
    class_node.parent = node.parent
    # set base class=tuple
    class_node.bases.append(base_type)
    # XXX add __init__(*attributes) method
    for attr in attributes:
        fake_node = nodes.EmptyNode()
        fake_node.parent = class_node
        fake_node.attrname = attr
        class_node.instance_attrs[attr] = [fake_node]
    return class_node, name, attributes
|
||||
|
||||
|
||||
def _has_namedtuple_base(node):
    """Predicate for class inference tip

    :type node: ClassDef
    :rtype: bool
    """
    # BUGFIX: coerce to bool so the return value matches the documented
    # rtype instead of leaking the intersection set (truthiness unchanged).
    return bool(set(node.basenames) & TYPING_NAMEDTUPLE_BASENAMES)
|
||||
|
||||
|
||||
def _looks_like(node, name):
    """Return True when the Call node's callee is syntactically named *name*."""
    callee = node.func
    if isinstance(callee, nodes.Attribute):
        called_name = callee.attrname
    elif isinstance(callee, nodes.Name):
        called_name = callee.name
    else:
        # Lambdas, subscripts, nested calls, ... cannot match by name.
        return False
    return called_name == name
|
||||
|
||||
|
||||
# Cheap syntactic predicates over Call nodes, matched by callee name.
_looks_like_namedtuple = functools.partial(_looks_like, name="namedtuple")
_looks_like_enum = functools.partial(_looks_like, name="Enum")
_looks_like_typing_namedtuple = functools.partial(_looks_like, name="NamedTuple")
|
||||
|
||||
|
||||
def infer_named_tuple(node, context=None):
    """Specific inference function for namedtuple Call node.

    Synthesizes a class with tuple as base, the namedtuple helper methods
    (_asdict, _make, _replace, _fields) and one property per field.
    Returns an iterator over the single synthesized ClassDef.
    """
    tuple_base_name = nodes.Name(name="tuple", parent=node.root())
    class_node, name, attributes = infer_func_form(
        node, tuple_base_name, context=context
    )
    call_site = arguments.CallSite.from_call(node, context=context)
    # Re-infer collections.namedtuple itself to resolve keyword arguments.
    node = extract_node("import collections; collections.namedtuple")
    try:
        func = next(node.infer())
    except StopIteration as e:
        raise InferenceError(node=node) from e
    try:
        rename = next(call_site.infer_argument(func, "rename", context)).bool_value()
    except (InferenceError, StopIteration):
        # rename defaults to False when not inferable.
        rename = False

    try:
        attributes = _check_namedtuple_attributes(name, attributes, rename)
    except AstroidTypeError as exc:
        raise UseInferenceDefault("TypeError: " + str(exc)) from exc
    except AstroidValueError as exc:
        raise UseInferenceDefault("ValueError: " + str(exc)) from exc

    replace_args = ", ".join(f"{arg}=None" for arg in attributes)
    field_def = (
        "    {name} = property(lambda self: self[{index:d}], "
        "doc='Alias for field number {index:d}')"
    )
    field_defs = "\n".join(
        field_def.format(name=name, index=index)
        for index, name in enumerate(attributes)
    )
    # Build a throwaway class whose members are grafted onto class_node.
    fake = AstroidBuilder(AstroidManager()).string_build(
        f"""
class {name}(tuple):
    __slots__ = ()
    _fields = {attributes!r}
    def _asdict(self):
        return self.__dict__
    @classmethod
    def _make(cls, iterable, new=tuple.__new__, len=len):
        return new(cls, iterable)
    def _replace(self, {replace_args}):
        return self
    def __getnewargs__(self):
        return tuple(self)
{field_defs}
    """
    )
    class_node.locals["_asdict"] = fake.body[0].locals["_asdict"]
    class_node.locals["_make"] = fake.body[0].locals["_make"]
    class_node.locals["_replace"] = fake.body[0].locals["_replace"]
    class_node.locals["_fields"] = fake.body[0].locals["_fields"]
    for attr in attributes:
        class_node.locals[attr] = fake.body[0].locals[attr]
    # we use UseInferenceDefault, we can't be a generator so return an iterator
    return iter([class_node])
|
||||
|
||||
|
||||
def _get_renamed_namedtuple_attributes(field_names):
|
||||
names = list(field_names)
|
||||
seen = set()
|
||||
for i, name in enumerate(field_names):
|
||||
if (
|
||||
not all(c.isalnum() or c == "_" for c in name)
|
||||
or keyword.iskeyword(name)
|
||||
or not name
|
||||
or name[0].isdigit()
|
||||
or name.startswith("_")
|
||||
or name in seen
|
||||
):
|
||||
names[i] = "_%d" % i
|
||||
seen.add(name)
|
||||
return tuple(names)
|
||||
|
||||
|
||||
def _check_namedtuple_attributes(typename, attributes, rename=False):
    """Validate (and optionally rename) namedtuple type and field names.

    Mirrors the validation performed by ``collections.namedtuple`` and
    raises AstroidTypeError / AstroidValueError on invalid input.
    """
    attributes = tuple(attributes)
    if rename:
        attributes = _get_renamed_namedtuple_attributes(attributes)

    # The following snippet is derived from the CPython Lib/collections/__init__.py sources
    # <snippet>
    for name in (typename,) + attributes:
        if not isinstance(name, str):
            raise AstroidTypeError("Type names and field names must be strings")
        if not name.isidentifier():
            # BUGFIX: a missing space previously produced the message
            # "...must be valididentifiers: ...".
            raise AstroidValueError(
                "Type names and field names must be valid "
                + f"identifiers: {name!r}"
            )
        if keyword.iskeyword(name):
            raise AstroidValueError(
                f"Type names and field names cannot be a keyword: {name!r}"
            )

    seen = set()
    for name in attributes:
        if name.startswith("_") and not rename:
            raise AstroidValueError(
                f"Field names cannot start with an underscore: {name!r}"
            )
        if name in seen:
            raise AstroidValueError(f"Encountered duplicate field name: {name!r}")
        seen.add(name)
    # </snippet>

    return attributes
|
||||
|
||||
|
||||
def infer_enum(node, context=None):
    """Specific inference function for enum Call node.

    Synthesizes a class based on a fake EnumMeta whose dunder methods yield
    objects exposing ``name`` and ``value``, then returns an iterator over a
    single instance of that class.
    """
    enum_meta = extract_node(
        """
    class EnumMeta(object):
        'docstring'
        def __call__(self, node):
            class EnumAttribute(object):
                name = ''
                value = 0
            return EnumAttribute()
        def __iter__(self):
            class EnumAttribute(object):
                name = ''
                value = 0
            return [EnumAttribute()]
        def __reversed__(self):
            class EnumAttribute(object):
                name = ''
                value = 0
            return (EnumAttribute, )
        def __next__(self):
            return next(iter(self))
        def __getitem__(self, attr):
            class Value(object):
                @property
                def name(self):
                    return ''
                @property
                def value(self):
                    return attr

            return Value()
        __members__ = ['']
    """
    )
    class_node = infer_func_form(node, enum_meta, context=context, enum=True)[0]
    return iter([class_node.instantiate_class()])
|
||||
|
||||
|
||||
# Method templates appended to the mocked class of an IntFlag member so that
# bitwise/arithmetic operators on members infer back to the member type.
# ``{name}`` is substituted with the member's class name via str.format.
INT_FLAG_ADDITION_METHODS = """
    def __or__(self, other):
        return {name}(self.value | other.value)
    def __and__(self, other):
        return {name}(self.value & other.value)
    def __xor__(self, other):
        return {name}(self.value ^ other.value)
    def __add__(self, other):
        return {name}(self.value + other.value)
    def __div__(self, other):
        return {name}(self.value / other.value)
    def __invert__(self):
        return {name}(~self.value)
    def __mul__(self, other):
        return {name}(self.value * other.value)
"""
||||
def infer_enum_class(node):
    """Specific inference for enums.

    Replaces each enum member assignment in *node* with a mocked class
    exposing ``name``/``value`` properties, synthesizes ``__members__``,
    and adds a fallback ``name`` property when no member shadows it.
    """
    for base_name in (b for cls in node.mro() for b in cls.basenames):
        if base_name not in ENUM_BASE_NAMES:
            continue
        if node.root().name == "enum":
            # Skip if the class is directly from enum module.
            break
        dunder_members = {}
        target_names = set()
        for local_name, assign_values in node.locals.items():
            # Only plain name assignments are treated as enum members.
            if any(
                not isinstance(value, nodes.AssignName) for value in assign_values
            ):
                continue

            statement = assign_values[0].statement(future=True)
            if isinstance(statement, nodes.Assign):
                if isinstance(statement.targets[0], nodes.Tuple):
                    assign_targets = statement.targets[0].itered()
                else:
                    assign_targets = statement.targets
            elif isinstance(statement, nodes.AnnAssign):
                assign_targets = [statement.target]
            else:
                continue

            return_value = None
            if isinstance(statement, nodes.Assign):
                if isinstance(statement.value, nodes.Const):
                    if isinstance(statement.value.value, str):
                        return_value = repr(statement.value.value)
                    else:
                        return_value = statement.value.value
                else:
                    return_value = statement.value.as_string()

            replacement_values = []
            for target in assign_targets:
                if isinstance(target, nodes.Starred):
                    continue
                target_names.add(target.name)
                # Replace all the assignments with our mocked class.
                class_src = dedent(
                    """
                class {name}({types}):
                    @property
                    def value(self):
                        return {return_value}
                    @property
                    def name(self):
                        return "{name}"
                """.format(
                        name=target.name,
                        types=", ".join(node.basenames),
                        return_value=return_value,
                    )
                )
                if "IntFlag" in base_name:
                    # Alright, we need to add some additional methods.
                    # Unfortunately we still can't infer the resulting objects as
                    # Enum members, but once we'll be able to do that, the following
                    # should result in some nice symbolic execution
                    class_src += INT_FLAG_ADDITION_METHODS.format(name=target.name)

                mocked = AstroidBuilder(
                    AstroidManager(), apply_transforms=False
                ).string_build(class_src)[target.name]
                mocked.parent = target.parent
                for method in node.mymethods():
                    mocked.locals[method.name] = [method]
                replacement_values.append(mocked.instantiate_class())
                dunder_members[local_name] = mocked
            node.locals[local_name] = replacement_values

        members = nodes.Dict(parent=node)
        members.postinit(
            [
                (nodes.Const(k, parent=members), nodes.Name(v.name, parent=members))
                for k, v in dunder_members.items()
            ]
        )
        node.locals["__members__"] = [members]
        # The enum.Enum class itself defines two @DynamicClassAttribute data-descriptors
        # "name" and "value" (which we override in the mocked class for each enum member
        # above). When dealing with inference of an arbitrary instance of the enum
        # class, e.g. in a method defined in the class body like:
        #     class SomeEnum(enum.Enum):
        #         def method(self):
        #             self.name  # <- here
        # In the absence of an enum member called "name" or "value", these attributes
        # should resolve to the descriptor on that particular instance, i.e. enum member.
        # For "value", we have no idea what that should be, but for "name", we at least
        # know that it should be a string, so infer that as a guess.
        if "name" not in target_names:
            name_src = dedent(
                """
            @property
            def name(self):
                return ''
            """
            )
            name_property = AstroidBuilder(AstroidManager()).string_build(name_src)[
                "name"
            ]
            node.locals["name"] = [name_property]
        break
    return node
||||
def infer_typing_namedtuple_class(class_node, context=None):
    """Infer a subclass of typing.NamedTuple"""
    # The annotated assignments of the class body become the namedtuple fields.
    annotated_fields = [
        child.target.name
        for child in class_node.body
        if isinstance(child, nodes.AnnAssign)
    ]
    code = dedent(
        """
    from collections import namedtuple
    namedtuple({typename!r}, {fields!r})
    """
    ).format(typename=class_node.name, fields=",".join(annotated_fields))
    call_node = extract_node(code)
    try:
        generated = next(infer_named_tuple(call_node, context))
    except StopIteration as error:
        raise InferenceError(node=call_node, context=context) from error

    # Carry the methods, class-level assignments and nested classes of the
    # original class over to the generated namedtuple class.
    for method in class_node.mymethods():
        generated.locals[method.name] = [method]

    for child in class_node.body:
        if isinstance(child, nodes.Assign):
            for target in child.targets:
                generated.locals[target.name] = class_node.locals[target.name]
        elif isinstance(child, nodes.ClassDef):
            generated.locals[child.name] = [child]

    return iter((generated,))
||||
def infer_typing_namedtuple_function(node, context=None):
    """
    Starting with python3.9, NamedTuple is a function of the typing module.
    The class NamedTuple is build dynamically through a call to `type` during
    initialization of the `_NamedTuple` variable.
    """
    # Resolve the underlying dynamically-built class instead of the function.
    namedtuple_cls = extract_node(
        """
    from typing import _NamedTuple
    _NamedTuple
    """
    )
    return namedtuple_cls.infer(context)
||||
def infer_typing_namedtuple(node, context=None):
    """Infer a typing.NamedTuple(...) call."""
    # A typing.NamedTuple call is essentially a namedtuple with different
    # arguments, so extract the field names and delegate to infer_named_tuple.
    try:
        called = next(node.func.infer())
    except (InferenceError, StopIteration) as exc:
        raise UseInferenceDefault from exc

    if called.qname() != "typing.NamedTuple":
        raise UseInferenceDefault

    if len(node.args) != 2:
        raise UseInferenceDefault

    if not isinstance(node.args[1], (nodes.List, nodes.Tuple)):
        raise UseInferenceDefault

    field_reprs = []
    for entry in node.args[1].elts:
        # Each field must be a two-element (name, type) pair.
        if not isinstance(entry, (nodes.List, nodes.Tuple)) or len(entry.elts) != 2:
            raise UseInferenceDefault
        field_reprs.append(entry.elts[0].as_string())

    typename = node.args[0].as_string()
    field_names = f"({','.join(field_reprs)},)" if field_reprs else "''"
    node = extract_node(f"namedtuple({typename}, {field_names})")
    return infer_named_tuple(node, context)
||||
def _is_enum_subclass(cls: astroid.ClassDef) -> bool:
    """Return whether cls is a subclass of an Enum."""
    try:
        ancestors = cls.mro()
    except MroError:
        # An unresolvable hierarchy cannot be shown to involve Enum.
        return False
    for ancestor in ancestors:
        if ancestor.name in ENUM_BASE_NAMES and (
            getattr(ancestor.root(), "name", None) == "enum"
        ):
            return True
    return False
||||
# Hook the namedtuple/enum inference helpers into the global astroid manager.
AstroidManager().register_transform(
    nodes.Call, inference_tip(infer_named_tuple), _looks_like_namedtuple
)
AstroidManager().register_transform(
    nodes.Call, inference_tip(infer_enum), _looks_like_enum
)
AstroidManager().register_transform(
    nodes.ClassDef, infer_enum_class, predicate=_is_enum_subclass
)
AstroidManager().register_transform(
    nodes.ClassDef, inference_tip(infer_typing_namedtuple_class), _has_namedtuple_base
)
AstroidManager().register_transform(
    nodes.FunctionDef,
    inference_tip(infer_typing_namedtuple_function),
    lambda node: node.name == "NamedTuple"
    and getattr(node.root(), "name", None) == "typing",
)
AstroidManager().register_transform(
    nodes.Call, inference_tip(infer_typing_namedtuple), _looks_like_typing_namedtuple
)
|
@ -1,85 +0,0 @@
|
|||
# Copyright (c) 2015-2016, 2018, 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
|
||||
"""Hooks for nose library."""
|
||||
|
||||
import re
|
||||
import textwrap
|
||||
|
||||
import astroid.builder
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.exceptions import InferenceError
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
# Shared builder used to synthesize the unittest-based stub module below.
_BUILDER = astroid.builder.AstroidBuilder(AstroidManager())
||||
CAPITALS = re.compile("([A-Z])")
|
||||
|
||||
|
||||
def _pep8(name, caps=CAPITALS):
|
||||
return caps.sub(lambda m: "_" + m.groups()[0].lower(), name)
|
||||
|
||||
|
||||
def _nose_tools_functions():
    """Get an iterator of names and bound methods.

    Yields ``(pep8_name, BoundMethod)`` pairs for every underscore-free
    ``assert*`` helper found on a mocked ``unittest.TestCase`` instance.
    """
    stub_module = _BUILDER.string_build(
        textwrap.dedent(
            """
    import unittest

    class Test(unittest.TestCase):
        pass
    a = Test()
    """
        )
    )
    try:
        test_case = next(stub_module["a"].infer())
    except (InferenceError, StopIteration):
        # Without an inferable TestCase instance there is nothing to export.
        return
    for meth in test_case.methods():
        if meth.name.startswith("assert") and "_" not in meth.name:
            yield _pep8(meth.name), astroid.BoundMethod(meth, test_case)
        if meth.name == "assertEqual":
            # nose also exports assert_equals.
            yield "assert_equals", astroid.BoundMethod(meth, test_case)
||||
def _nose_tools_transform(node):
    """Inject the pep8-style nose assertion helpers into *node*'s locals."""
    for helper_name, bound in _nose_tools_functions():
        node.locals[helper_name] = [bound]
||||
def _nose_tools_trivial_transform():
    """Custom transform for the nose.tools module."""
    stub = _BUILDER.string_build("""__all__ = []""")
    exported = ["ok_", "eq_"]

    for snake_name, bound in _nose_tools_functions():
        exported.append(snake_name)
        stub[snake_name] = bound

    # Update the __all__ variable, since nose.tools
    # does this manually with .append.
    dunder_all_assign = stub["__all__"].parent
    dunder_all = astroid.List(exported)
    dunder_all.parent = dunder_all_assign
    dunder_all_assign.value = dunder_all
    return stub
||||
# Hook the nose stubs into the global astroid manager.
register_module_extender(
    AstroidManager(), "nose.tools.trivial", _nose_tools_trivial_transform
)
AstroidManager().register_transform(
    astroid.Module, _nose_tools_transform, lambda n: n.name == "nose.tools"
)
|
@ -1,27 +0,0 @@
|
|||
# Copyright (c) 2019-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
|
||||
"""Astroid hooks for numpy.core.fromnumeric module."""
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import parse
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def numpy_core_fromnumeric_transform():
    """Return a stub module mocking ``numpy.core.fromnumeric``."""
    return parse(
        """
    def sum(a, axis=None, dtype=None, out=None, keepdims=None, initial=None):
        return numpy.ndarray([0, 0])
    """
    )


register_module_extender(
    AstroidManager(), "numpy.core.fromnumeric", numpy_core_fromnumeric_transform
)
|
@ -1,34 +0,0 @@
|
|||
# Copyright (c) 2019-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
|
||||
"""Astroid hooks for numpy.core.function_base module."""
|
||||
|
||||
import functools
|
||||
|
||||
from astroid.brain.brain_numpy_utils import infer_numpy_member, looks_like_numpy_member
|
||||
from astroid.inference_tip import inference_tip
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.nodes.node_classes import Attribute
|
||||
|
||||
# Source stubs for numpy.core.function_base callables whose result should be
# inferred as an ndarray.
METHODS_TO_BE_INFERRED = {
    "linspace": """def linspace(start, stop, num=50, endpoint=True, retstep=False, dtype=None, axis=0):
            return numpy.ndarray([0, 0])""",
    "logspace": """def logspace(start, stop, num=50, endpoint=True, base=10.0, dtype=None, axis=0):
            return numpy.ndarray([0, 0])""",
    "geomspace": """def geomspace(start, stop, num=50, endpoint=True, dtype=None, axis=0):
            return numpy.ndarray([0, 0])""",
}

for attr_name, stub_src in METHODS_TO_BE_INFERRED.items():
    inference_function = functools.partial(infer_numpy_member, stub_src)
    AstroidManager().register_transform(
        Attribute,
        inference_tip(inference_function),
        functools.partial(looks_like_numpy_member, attr_name),
    )
|
@ -1,100 +0,0 @@
|
|||
# Copyright (c) 2019-2020 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
|
||||
"""Astroid hooks for numpy.core.multiarray module."""
|
||||
|
||||
import functools
|
||||
|
||||
from astroid.brain.brain_numpy_utils import infer_numpy_member, looks_like_numpy_member
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import parse
|
||||
from astroid.inference_tip import inference_tip
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.nodes.node_classes import Attribute, Name
|
||||
|
||||
|
||||
def numpy_core_multiarray_transform():
    """Return a stub module mocking ``numpy.core.multiarray``."""
    return parse(
        """
    # different functions defined in multiarray.py
    def inner(a, b):
        return numpy.ndarray([0, 0])

    def vdot(a, b):
        return numpy.ndarray([0, 0])
    """
    )


register_module_extender(
    AstroidManager(), "numpy.core.multiarray", numpy_core_multiarray_transform
)
||||
|
||||
# Source stubs for numpy.core.multiarray callables; each member is registered
# for inference both as an attribute (``np.array``) and as a bare name
# (``from numpy import array``).
METHODS_TO_BE_INFERRED = {
    "array": """def array(object, dtype=None, copy=True, order='K', subok=False, ndmin=0):
            return numpy.ndarray([0, 0])""",
    "dot": """def dot(a, b, out=None):
            return numpy.ndarray([0, 0])""",
    "empty_like": """def empty_like(a, dtype=None, order='K', subok=True):
            return numpy.ndarray((0, 0))""",
    "concatenate": """def concatenate(arrays, axis=None, out=None):
            return numpy.ndarray((0, 0))""",
    "where": """def where(condition, x=None, y=None):
            return numpy.ndarray([0, 0])""",
    "empty": """def empty(shape, dtype=float, order='C'):
            return numpy.ndarray([0, 0])""",
    "bincount": """def bincount(x, weights=None, minlength=0):
            return numpy.ndarray([0, 0])""",
    "busday_count": """def busday_count(begindates, enddates, weekmask='1111100', holidays=[], busdaycal=None, out=None):
            return numpy.ndarray([0, 0])""",
    "busday_offset": """def busday_offset(dates, offsets, roll='raise', weekmask='1111100', holidays=None, busdaycal=None, out=None):
            return numpy.ndarray([0, 0])""",
    "can_cast": """def can_cast(from_, to, casting='safe'):
            return True""",
    "copyto": """def copyto(dst, src, casting='same_kind', where=True):
            return None""",
    "datetime_as_string": """def datetime_as_string(arr, unit=None, timezone='naive', casting='same_kind'):
            return numpy.ndarray([0, 0])""",
    "is_busday": """def is_busday(dates, weekmask='1111100', holidays=None, busdaycal=None, out=None):
            return numpy.ndarray([0, 0])""",
    "lexsort": """def lexsort(keys, axis=-1):
            return numpy.ndarray([0, 0])""",
    "may_share_memory": """def may_share_memory(a, b, max_work=None):
            return True""",
    # Not yet available because dtype is not yet present in those brains
    # "min_scalar_type": """def min_scalar_type(a):
    #         return numpy.dtype('int16')""",
    "packbits": """def packbits(a, axis=None, bitorder='big'):
            return numpy.ndarray([0, 0])""",
    # Not yet available because dtype is not yet present in those brains
    # "result_type": """def result_type(*arrays_and_dtypes):
    #         return numpy.dtype('int16')""",
    "shares_memory": """def shares_memory(a, b, max_work=None):
            return True""",
    "unpackbits": """def unpackbits(a, axis=None, count=None, bitorder='big'):
            return numpy.ndarray([0, 0])""",
    "unravel_index": """def unravel_index(indices, shape, order='C'):
            return (numpy.ndarray([0, 0]),)""",
    "zeros": """def zeros(shape, dtype=float, order='C'):
            return numpy.ndarray([0, 0])""",
}

for method_name, function_src in METHODS_TO_BE_INFERRED.items():
    inference_function = functools.partial(infer_numpy_member, function_src)
    AstroidManager().register_transform(
        Attribute,
        inference_tip(inference_function),
        functools.partial(looks_like_numpy_member, method_name),
    )
    AstroidManager().register_transform(
        Name,
        inference_tip(inference_function),
        functools.partial(looks_like_numpy_member, method_name),
    )
|
@ -1,51 +0,0 @@
|
|||
# Copyright (c) 2019-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
|
||||
"""Astroid hooks for numpy.core.numeric module."""
|
||||
|
||||
import functools
|
||||
|
||||
from astroid.brain.brain_numpy_utils import infer_numpy_member, looks_like_numpy_member
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import parse
|
||||
from astroid.inference_tip import inference_tip
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.nodes.node_classes import Attribute
|
||||
|
||||
|
||||
def numpy_core_numeric_transform():
    """Return a stub module mocking ``numpy.core.numeric``."""
    return parse(
        """
    # different functions defined in numeric.py
    import numpy
    def zeros_like(a, dtype=None, order='K', subok=True): return numpy.ndarray((0, 0))
    def ones_like(a, dtype=None, order='K', subok=True): return numpy.ndarray((0, 0))
    def full_like(a, fill_value, dtype=None, order='K', subok=True): return numpy.ndarray((0, 0))
    """
    )


register_module_extender(
    AstroidManager(), "numpy.core.numeric", numpy_core_numeric_transform
)
||||
|
||||
|
||||
# Source stub for the one numpy.core.numeric callable inferred as an ndarray.
METHODS_TO_BE_INFERRED = {
    "ones": """def ones(shape, dtype=None, order='C'):
            return numpy.ndarray([0, 0])"""
}


for attr_name, stub_src in METHODS_TO_BE_INFERRED.items():
    inference_function = functools.partial(infer_numpy_member, stub_src)
    AstroidManager().register_transform(
        Attribute,
        inference_tip(inference_function),
        functools.partial(looks_like_numpy_member, attr_name),
    )
|
@ -1,267 +0,0 @@
|
|||
# Copyright (c) 2019-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
# TODO(hippo91) : correct the methods signature.
|
||||
|
||||
"""Astroid hooks for numpy.core.numerictypes module."""
|
||||
from astroid.brain.brain_numpy_utils import numpy_supports_type_hints
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import parse
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def numpy_core_numerictypes_transform():
    """Return a stub module mocking ``numpy.core.numerictypes``."""
    # TODO: Uniformize the generic API with the ndarray one.
    # According to numpy doc the generic object should expose
    # the same API than ndarray. This has been done here partially
    # through the astype method.
    generic_src = """
    class generic(object):
        def __init__(self, value):
            self.T = np.ndarray([0, 0])
            self.base = None
            self.data = None
            self.dtype = None
            self.flags = None
            # Should be a numpy.flatiter instance but not available for now
            # Putting an array instead so that iteration and indexing are authorized
            self.flat = np.ndarray([0, 0])
            self.imag = None
            self.itemsize = None
            self.nbytes = None
            self.ndim = None
            self.real = None
            self.size = None
            self.strides = None

        def all(self): return uninferable
        def any(self): return uninferable
        def argmax(self): return uninferable
        def argmin(self): return uninferable
        def argsort(self): return uninferable
        def astype(self, dtype, order='K', casting='unsafe', subok=True, copy=True): return np.ndarray([0, 0])
        def base(self): return uninferable
        def byteswap(self): return uninferable
        def choose(self): return uninferable
        def clip(self): return uninferable
        def compress(self): return uninferable
        def conj(self): return uninferable
        def conjugate(self): return uninferable
        def copy(self): return uninferable
        def cumprod(self): return uninferable
        def cumsum(self): return uninferable
        def data(self): return uninferable
        def diagonal(self): return uninferable
        def dtype(self): return uninferable
        def dump(self): return uninferable
        def dumps(self): return uninferable
        def fill(self): return uninferable
        def flags(self): return uninferable
        def flat(self): return uninferable
        def flatten(self): return uninferable
        def getfield(self): return uninferable
        def imag(self): return uninferable
        def item(self): return uninferable
        def itemset(self): return uninferable
        def itemsize(self): return uninferable
        def max(self): return uninferable
        def mean(self): return uninferable
        def min(self): return uninferable
        def nbytes(self): return uninferable
        def ndim(self): return uninferable
        def newbyteorder(self): return uninferable
        def nonzero(self): return uninferable
        def prod(self): return uninferable
        def ptp(self): return uninferable
        def put(self): return uninferable
        def ravel(self): return uninferable
        def real(self): return uninferable
        def repeat(self): return uninferable
        def reshape(self): return uninferable
        def resize(self): return uninferable
        def round(self): return uninferable
        def searchsorted(self): return uninferable
        def setfield(self): return uninferable
        def setflags(self): return uninferable
        def shape(self): return uninferable
        def size(self): return uninferable
        def sort(self): return uninferable
        def squeeze(self): return uninferable
        def std(self): return uninferable
        def strides(self): return uninferable
        def sum(self): return uninferable
        def swapaxes(self): return uninferable
        def take(self): return uninferable
        def tobytes(self): return uninferable
        def tofile(self): return uninferable
        def tolist(self): return uninferable
        def tostring(self): return uninferable
        def trace(self): return uninferable
        def transpose(self): return uninferable
        def var(self): return uninferable
        def view(self): return uninferable
    """
    if numpy_supports_type_hints():
        # Newer numpy versions allow e.g. ``np.uint8[Any]`` subscripting.
        generic_src += """
        @classmethod
        def __class_getitem__(cls, value):
            return cls
        """
    return parse(
        generic_src
        + """
    class dtype(object):
        def __init__(self, obj, align=False, copy=False):
            self.alignment = None
            self.base = None
            self.byteorder = None
            self.char = None
            self.descr = None
            self.fields = None
            self.flags = None
            self.hasobject = None
            self.isalignedstruct = None
            self.isbuiltin = None
            self.isnative = None
            self.itemsize = None
            self.kind = None
            self.metadata = None
            self.name = None
            self.names = None
            self.num = None
            self.shape = None
            self.str = None
            self.subdtype = None
            self.type = None

        def newbyteorder(self, new_order='S'): return uninferable
        def __neg__(self): return uninferable

    class busdaycalendar(object):
        def __init__(self, weekmask='1111100', holidays=None):
            self.holidays = None
            self.weekmask = None

    class flexible(generic): pass
    class bool_(generic): pass
    class number(generic):
        def __neg__(self): return uninferable
    class datetime64(generic):
        def __init__(self, nb, unit=None): pass


    class void(flexible):
        def __init__(self, *args, **kwargs):
            self.base = None
            self.dtype = None
            self.flags = None
        def getfield(self): return uninferable
        def setfield(self): return uninferable


    class character(flexible): pass


    class integer(number):
        def __init__(self, value):
            self.denominator = None
            self.numerator = None


    class inexact(number): pass


    class str_(str, character):
        def maketrans(self, x, y=None, z=None): return uninferable


    class bytes_(bytes, character):
        def fromhex(self, string): return uninferable
        def maketrans(self, frm, to): return uninferable


    class signedinteger(integer): pass


    class unsignedinteger(integer): pass


    class complexfloating(inexact): pass


    class floating(inexact): pass


    class float64(floating, float):
        def fromhex(self, string): return uninferable


    class uint64(unsignedinteger): pass
    class complex64(complexfloating): pass
    class int16(signedinteger): pass
    class float96(floating): pass
    class int8(signedinteger): pass
    class uint32(unsignedinteger): pass
    class uint8(unsignedinteger): pass
    class _typedict(dict): pass
    class complex192(complexfloating): pass
    class timedelta64(signedinteger):
        def __init__(self, nb, unit=None): pass
    class int32(signedinteger): pass
    class uint16(unsignedinteger): pass
    class float32(floating): pass
    class complex128(complexfloating, complex): pass
    class float16(floating): pass
    class int64(signedinteger): pass

    buffer_type = memoryview
    bool8 = bool_
    byte = int8
    bytes0 = bytes_
    cdouble = complex128
    cfloat = complex128
    clongdouble = complex192
    clongfloat = complex192
    complex_ = complex128
    csingle = complex64
    double = float64
    float_ = float64
    half = float16
    int0 = int32
    int_ = int32
    intc = int32
    intp = int32
    long = int32
    longcomplex = complex192
    longdouble = float96
    longfloat = float96
    longlong = int64
    object0 = object_
    object_ = object_
    short = int16
    single = float32
    singlecomplex = complex64
    str0 = str_
    string_ = bytes_
    ubyte = uint8
    uint = uint32
    uint0 = uint32
    uintc = uint32
    uintp = uint32
    ulonglong = uint64
    unicode = str_
    unicode_ = str_
    ushort = uint16
    void0 = void
    """
    )


register_module_extender(
    AstroidManager(), "numpy.core.numerictypes", numpy_core_numerictypes_transform
)
|
@ -1,158 +0,0 @@
|
|||
# Copyright (c) 2019-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
# Note: starting with version 1.18 numpy module has `__getattr__` method which prevent
|
||||
# `pylint` to emit `no-member` message for all numpy's attributes. (see pylint's module
|
||||
# typecheck in `_emit_no_member` function)
|
||||
|
||||
"""Astroid hooks for numpy.core.umath module."""
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import parse
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def numpy_core_umath_transform():
    """Build a stub module mirroring the ufuncs of ``numpy.core.umath``.

    Each ufunc is modelled as an instance of a fake class whose ``__call__``
    accepts the standard optional ufunc keyword arguments, so that calls such
    as ``numpy.add(a, b, out=...)`` infer cleanly.
    """
    # Keyword-argument tail shared by every ufunc ``__call__`` signature below.
    ufunc_optional_keyword_arguments = (
        """out=None, where=True, casting='same_kind', order='K', """
        """dtype=None, subok=True"""
    )
    return parse(
        """
    class FakeUfunc:
        def __init__(self):
            self.__doc__ = str()
            self.__name__ = str()
            self.nin = 0
            self.nout = 0
            self.nargs = 0
            self.ntypes = 0
            self.types = None
            self.identity = None
            self.signature = None

        @classmethod
        def reduce(cls, a, axis=None, dtype=None, out=None):
            return numpy.ndarray([0, 0])

        @classmethod
        def accumulate(cls, array, axis=None, dtype=None, out=None):
            return numpy.ndarray([0, 0])

        @classmethod
        def reduceat(cls, a, indices, axis=None, dtype=None, out=None):
            return numpy.ndarray([0, 0])

        @classmethod
        def outer(cls, A, B, **kwargs):
            return numpy.ndarray([0, 0])

        @classmethod
        def at(cls, a, indices, b=None):
            return numpy.ndarray([0, 0])

    class FakeUfuncOneArg(FakeUfunc):
        def __call__(self, x, {opt_args:s}):
            return numpy.ndarray([0, 0])

    class FakeUfuncOneArgBis(FakeUfunc):
        def __call__(self, x, {opt_args:s}):
            return numpy.ndarray([0, 0]), numpy.ndarray([0, 0])

    class FakeUfuncTwoArgs(FakeUfunc):
        def __call__(self, x1, x2, {opt_args:s}):
            return numpy.ndarray([0, 0])

    # Constants
    e = 2.718281828459045
    euler_gamma = 0.5772156649015329

    # One arg functions with optional kwargs
    arccos = FakeUfuncOneArg()
    arccosh = FakeUfuncOneArg()
    arcsin = FakeUfuncOneArg()
    arcsinh = FakeUfuncOneArg()
    arctan = FakeUfuncOneArg()
    arctanh = FakeUfuncOneArg()
    cbrt = FakeUfuncOneArg()
    conj = FakeUfuncOneArg()
    conjugate = FakeUfuncOneArg()
    cosh = FakeUfuncOneArg()
    deg2rad = FakeUfuncOneArg()
    degrees = FakeUfuncOneArg()
    exp2 = FakeUfuncOneArg()
    expm1 = FakeUfuncOneArg()
    fabs = FakeUfuncOneArg()
    frexp = FakeUfuncOneArgBis()
    isfinite = FakeUfuncOneArg()
    isinf = FakeUfuncOneArg()
    log = FakeUfuncOneArg()
    log1p = FakeUfuncOneArg()
    log2 = FakeUfuncOneArg()
    logical_not = FakeUfuncOneArg()
    modf = FakeUfuncOneArgBis()
    negative = FakeUfuncOneArg()
    positive = FakeUfuncOneArg()
    rad2deg = FakeUfuncOneArg()
    radians = FakeUfuncOneArg()
    reciprocal = FakeUfuncOneArg()
    rint = FakeUfuncOneArg()
    sign = FakeUfuncOneArg()
    signbit = FakeUfuncOneArg()
    sinh = FakeUfuncOneArg()
    spacing = FakeUfuncOneArg()
    square = FakeUfuncOneArg()
    tan = FakeUfuncOneArg()
    tanh = FakeUfuncOneArg()
    trunc = FakeUfuncOneArg()

    # Two args functions with optional kwargs
    add = FakeUfuncTwoArgs()
    bitwise_and = FakeUfuncTwoArgs()
    bitwise_or = FakeUfuncTwoArgs()
    bitwise_xor = FakeUfuncTwoArgs()
    copysign = FakeUfuncTwoArgs()
    divide = FakeUfuncTwoArgs()
    divmod = FakeUfuncTwoArgs()
    equal = FakeUfuncTwoArgs()
    float_power = FakeUfuncTwoArgs()
    floor_divide = FakeUfuncTwoArgs()
    fmax = FakeUfuncTwoArgs()
    fmin = FakeUfuncTwoArgs()
    fmod = FakeUfuncTwoArgs()
    greater = FakeUfuncTwoArgs()
    gcd = FakeUfuncTwoArgs()
    hypot = FakeUfuncTwoArgs()
    heaviside = FakeUfuncTwoArgs()
    lcm = FakeUfuncTwoArgs()
    ldexp = FakeUfuncTwoArgs()
    left_shift = FakeUfuncTwoArgs()
    less = FakeUfuncTwoArgs()
    logaddexp = FakeUfuncTwoArgs()
    logaddexp2 = FakeUfuncTwoArgs()
    logical_and = FakeUfuncTwoArgs()
    logical_or = FakeUfuncTwoArgs()
    logical_xor = FakeUfuncTwoArgs()
    maximum = FakeUfuncTwoArgs()
    minimum = FakeUfuncTwoArgs()
    multiply = FakeUfuncTwoArgs()
    nextafter = FakeUfuncTwoArgs()
    not_equal = FakeUfuncTwoArgs()
    power = FakeUfuncTwoArgs()
    remainder = FakeUfuncTwoArgs()
    right_shift = FakeUfuncTwoArgs()
    subtract = FakeUfuncTwoArgs()
    true_divide = FakeUfuncTwoArgs()
    """.format(
            opt_args=ufunc_optional_keyword_arguments
        )
    )


register_module_extender(
    AstroidManager(), "numpy.core.umath", numpy_core_umath_transform
)
|
|
@ -1,28 +0,0 @@
|
|||
# Copyright (c) 2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
"""Astroid hooks for numpy ma module"""
|
||||
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import parse
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def numpy_ma_transform():
    """Build a stub for ``numpy.ma`` exposing ``masked_where``.

    The stub makes ``masked_where`` calls infer as a ``numpy.ma.masked_array``.
    (The function itself takes no arguments; it is a module extender factory.)
    """
    return parse(
        """
    import numpy.ma
    def masked_where(condition, a, copy=True):
        return numpy.ma.masked_array(a, mask=[])
    """
    )


register_module_extender(AstroidManager(), "numpy.ma", numpy_ma_transform)
|
|
@ -1,165 +0,0 @@
|
|||
# Copyright (c) 2015-2016, 2018-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2017-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
|
||||
"""Astroid hooks for numpy ndarray class."""
|
||||
from astroid.brain.brain_numpy_utils import numpy_supports_type_hints
|
||||
from astroid.builder import extract_node
|
||||
from astroid.inference_tip import inference_tip
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.nodes.node_classes import Attribute
|
||||
|
||||
|
||||
def infer_numpy_ndarray(node, context=None):
    """Infer a ``numpy.ndarray`` attribute access as a stub ndarray class.

    :param node: node to infer
    :param context: inference context
    """
    # The stub mirrors the public ndarray API; most methods return a fresh
    # ndarray so chained calls keep inferring as arrays.
    ndarray = """
    class ndarray(object):
        def __init__(self, shape, dtype=float, buffer=None, offset=0,
                     strides=None, order=None):
            self.T = numpy.ndarray([0, 0])
            self.base = None
            self.ctypes = None
            self.data = None
            self.dtype = None
            self.flags = None
            # Should be a numpy.flatiter instance but not available for now
            # Putting an array instead so that iteration and indexing are authorized
            self.flat = np.ndarray([0, 0])
            self.imag = np.ndarray([0, 0])
            self.itemsize = None
            self.nbytes = None
            self.ndim = None
            self.real = np.ndarray([0, 0])
            self.shape = numpy.ndarray([0, 0])
            self.size = None
            self.strides = None

        def __abs__(self): return numpy.ndarray([0, 0])
        def __add__(self, value): return numpy.ndarray([0, 0])
        def __and__(self, value): return numpy.ndarray([0, 0])
        def __array__(self, dtype=None): return numpy.ndarray([0, 0])
        def __array_wrap__(self, obj): return numpy.ndarray([0, 0])
        def __contains__(self, key): return True
        def __copy__(self): return numpy.ndarray([0, 0])
        def __deepcopy__(self, memo): return numpy.ndarray([0, 0])
        def __divmod__(self, value): return (numpy.ndarray([0, 0]), numpy.ndarray([0, 0]))
        def __eq__(self, value): return numpy.ndarray([0, 0])
        def __float__(self): return 0.
        def __floordiv__(self): return numpy.ndarray([0, 0])
        def __ge__(self, value): return numpy.ndarray([0, 0])
        def __getitem__(self, key): return uninferable
        def __gt__(self, value): return numpy.ndarray([0, 0])
        def __iadd__(self, value): return numpy.ndarray([0, 0])
        def __iand__(self, value): return numpy.ndarray([0, 0])
        def __ifloordiv__(self, value): return numpy.ndarray([0, 0])
        def __ilshift__(self, value): return numpy.ndarray([0, 0])
        def __imod__(self, value): return numpy.ndarray([0, 0])
        def __imul__(self, value): return numpy.ndarray([0, 0])
        def __int__(self): return 0
        def __invert__(self): return numpy.ndarray([0, 0])
        def __ior__(self, value): return numpy.ndarray([0, 0])
        def __ipow__(self, value): return numpy.ndarray([0, 0])
        def __irshift__(self, value): return numpy.ndarray([0, 0])
        def __isub__(self, value): return numpy.ndarray([0, 0])
        def __itruediv__(self, value): return numpy.ndarray([0, 0])
        def __ixor__(self, value): return numpy.ndarray([0, 0])
        def __le__(self, value): return numpy.ndarray([0, 0])
        def __len__(self): return 1
        def __lshift__(self, value): return numpy.ndarray([0, 0])
        def __lt__(self, value): return numpy.ndarray([0, 0])
        def __matmul__(self, value): return numpy.ndarray([0, 0])
        def __mod__(self, value): return numpy.ndarray([0, 0])
        def __mul__(self, value): return numpy.ndarray([0, 0])
        def __ne__(self, value): return numpy.ndarray([0, 0])
        def __neg__(self): return numpy.ndarray([0, 0])
        def __or__(self, value): return numpy.ndarray([0, 0])
        def __pos__(self): return numpy.ndarray([0, 0])
        def __pow__(self): return numpy.ndarray([0, 0])
        def __repr__(self): return str()
        def __rshift__(self): return numpy.ndarray([0, 0])
        def __setitem__(self, key, value): return uninferable
        def __str__(self): return str()
        def __sub__(self, value): return numpy.ndarray([0, 0])
        def __truediv__(self, value): return numpy.ndarray([0, 0])
        def __xor__(self, value): return numpy.ndarray([0, 0])
        def all(self, axis=None, out=None, keepdims=False): return np.ndarray([0, 0])
        def any(self, axis=None, out=None, keepdims=False): return np.ndarray([0, 0])
        def argmax(self, axis=None, out=None): return np.ndarray([0, 0])
        def argmin(self, axis=None, out=None): return np.ndarray([0, 0])
        def argpartition(self, kth, axis=-1, kind='introselect', order=None): return np.ndarray([0, 0])
        def argsort(self, axis=-1, kind='quicksort', order=None): return np.ndarray([0, 0])
        def astype(self, dtype, order='K', casting='unsafe', subok=True, copy=True): return np.ndarray([0, 0])
        def byteswap(self, inplace=False): return np.ndarray([0, 0])
        def choose(self, choices, out=None, mode='raise'): return np.ndarray([0, 0])
        def clip(self, min=None, max=None, out=None): return np.ndarray([0, 0])
        def compress(self, condition, axis=None, out=None): return np.ndarray([0, 0])
        def conj(self): return np.ndarray([0, 0])
        def conjugate(self): return np.ndarray([0, 0])
        def copy(self, order='C'): return np.ndarray([0, 0])
        def cumprod(self, axis=None, dtype=None, out=None): return np.ndarray([0, 0])
        def cumsum(self, axis=None, dtype=None, out=None): return np.ndarray([0, 0])
        def diagonal(self, offset=0, axis1=0, axis2=1): return np.ndarray([0, 0])
        def dot(self, b, out=None): return np.ndarray([0, 0])
        def dump(self, file): return None
        def dumps(self): return str()
        def fill(self, value): return None
        def flatten(self, order='C'): return np.ndarray([0, 0])
        def getfield(self, dtype, offset=0): return np.ndarray([0, 0])
        def item(self, *args): return uninferable
        def itemset(self, *args): return None
        def max(self, axis=None, out=None): return np.ndarray([0, 0])
        def mean(self, axis=None, dtype=None, out=None, keepdims=False): return np.ndarray([0, 0])
        def min(self, axis=None, out=None, keepdims=False): return np.ndarray([0, 0])
        def newbyteorder(self, new_order='S'): return np.ndarray([0, 0])
        def nonzero(self): return (1,)
        def partition(self, kth, axis=-1, kind='introselect', order=None): return None
        def prod(self, axis=None, dtype=None, out=None, keepdims=False): return np.ndarray([0, 0])
        def ptp(self, axis=None, out=None): return np.ndarray([0, 0])
        def put(self, indices, values, mode='raise'): return None
        def ravel(self, order='C'): return np.ndarray([0, 0])
        def repeat(self, repeats, axis=None): return np.ndarray([0, 0])
        def reshape(self, shape, order='C'): return np.ndarray([0, 0])
        def resize(self, new_shape, refcheck=True): return None
        def round(self, decimals=0, out=None): return np.ndarray([0, 0])
        def searchsorted(self, v, side='left', sorter=None): return np.ndarray([0, 0])
        def setfield(self, val, dtype, offset=0): return None
        def setflags(self, write=None, align=None, uic=None): return None
        def sort(self, axis=-1, kind='quicksort', order=None): return None
        def squeeze(self, axis=None): return np.ndarray([0, 0])
        def std(self, axis=None, dtype=None, out=None, ddof=0, keepdims=False): return np.ndarray([0, 0])
        def sum(self, axis=None, dtype=None, out=None, keepdims=False): return np.ndarray([0, 0])
        def swapaxes(self, axis1, axis2): return np.ndarray([0, 0])
        def take(self, indices, axis=None, out=None, mode='raise'): return np.ndarray([0, 0])
        def tobytes(self, order='C'): return b''
        def tofile(self, fid, sep="", format="%s"): return None
        def tolist(self, ): return []
        def tostring(self, order='C'): return b''
        def trace(self, offset=0, axis1=0, axis2=1, dtype=None, out=None): return np.ndarray([0, 0])
        def transpose(self, *axes): return np.ndarray([0, 0])
        def var(self, axis=None, dtype=None, out=None, ddof=0, keepdims=False): return np.ndarray([0, 0])
        def view(self, dtype=None, type=None): return np.ndarray([0, 0])
    """
    if numpy_supports_type_hints():
        # numpy >= 1.20 makes ndarray subscriptable (e.g. ``ndarray[Any]``).
        ndarray += """
        @classmethod
        def __class_getitem__(cls, value):
            return cls
        """
    node = extract_node(ndarray)
    return node.infer(context=context)
|
||||
|
||||
|
||||
def _looks_like_numpy_ndarray(node):
    """Return True when *node* is an attribute access spelled ``.ndarray``."""
    if not isinstance(node, Attribute):
        return False
    return node.attrname == "ndarray"
|
||||
|
||||
|
||||
# Infer any ``<expr>.ndarray`` attribute access via the stub ndarray class.
AstroidManager().register_transform(
    Attribute,
    inference_tip(infer_numpy_ndarray),
    _looks_like_numpy_ndarray,
)
|
|
@ -1,75 +0,0 @@
|
|||
# Copyright (c) 2019-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
# TODO(hippo91) : correct the functions return types
|
||||
"""Astroid hooks for numpy.random.mtrand module."""
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import parse
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def numpy_random_mtrand_transform():
    """Build a stub module for ``numpy.random.mtrand``'s draw functions.

    Most stubs return ``uninferable``; ``randint`` returns an ndarray so that
    indexing its result still infers.
    """
    return parse(
        """
    def beta(a, b, size=None): return uninferable
    def binomial(n, p, size=None): return uninferable
    def bytes(length): return uninferable
    def chisquare(df, size=None): return uninferable
    def choice(a, size=None, replace=True, p=None): return uninferable
    def dirichlet(alpha, size=None): return uninferable
    def exponential(scale=1.0, size=None): return uninferable
    def f(dfnum, dfden, size=None): return uninferable
    def gamma(shape, scale=1.0, size=None): return uninferable
    def geometric(p, size=None): return uninferable
    def get_state(): return uninferable
    def gumbel(loc=0.0, scale=1.0, size=None): return uninferable
    def hypergeometric(ngood, nbad, nsample, size=None): return uninferable
    def laplace(loc=0.0, scale=1.0, size=None): return uninferable
    def logistic(loc=0.0, scale=1.0, size=None): return uninferable
    def lognormal(mean=0.0, sigma=1.0, size=None): return uninferable
    def logseries(p, size=None): return uninferable
    def multinomial(n, pvals, size=None): return uninferable
    def multivariate_normal(mean, cov, size=None): return uninferable
    def negative_binomial(n, p, size=None): return uninferable
    def noncentral_chisquare(df, nonc, size=None): return uninferable
    def noncentral_f(dfnum, dfden, nonc, size=None): return uninferable
    def normal(loc=0.0, scale=1.0, size=None): return uninferable
    def pareto(a, size=None): return uninferable
    def permutation(x): return uninferable
    def poisson(lam=1.0, size=None): return uninferable
    def power(a, size=None): return uninferable
    def rand(*args): return uninferable
    def randint(low, high=None, size=None, dtype='l'):
        import numpy
        return numpy.ndarray((1,1))
    def randn(*args): return uninferable
    def random(size=None): return uninferable
    def random_integers(low, high=None, size=None): return uninferable
    def random_sample(size=None): return uninferable
    def rayleigh(scale=1.0, size=None): return uninferable
    def seed(seed=None): return uninferable
    def set_state(state): return uninferable
    def shuffle(x): return uninferable
    def standard_cauchy(size=None): return uninferable
    def standard_exponential(size=None): return uninferable
    def standard_gamma(shape, size=None): return uninferable
    def standard_normal(size=None): return uninferable
    def standard_t(df, size=None): return uninferable
    def triangular(left, mode, right, size=None): return uninferable
    def uniform(low=0.0, high=1.0, size=None): return uninferable
    def vonmises(mu, kappa, size=None): return uninferable
    def wald(mean, scale, size=None): return uninferable
    def weibull(a, size=None): return uninferable
    def zipf(a, size=None): return uninferable
    """
    )


register_module_extender(
    AstroidManager(), "numpy.random.mtrand", numpy_random_mtrand_transform
)
|
|
@ -1,91 +0,0 @@
|
|||
# Copyright (c) 2019-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2019-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
|
||||
"""Different utilities for the numpy brains"""
|
||||
from typing import Tuple
|
||||
|
||||
from astroid.builder import extract_node
|
||||
from astroid.nodes.node_classes import Attribute, Import, Name, NodeNG
|
||||
|
||||
# Class subscript is available in numpy starting with version 1.20.0
# (kept as strings to match the tuples returned by ``_get_numpy_version``).
NUMPY_VERSION_TYPE_HINTS_SUPPORT = ("1", "20", "0")
|
||||
|
||||
|
||||
def numpy_supports_type_hints() -> bool:
    """Return True if the installed numpy supports type hints (class subscripts).

    The version components are compared numerically: the previous plain
    string-tuple comparison ranked e.g. ``("1", "9", "0")`` *above*
    ``("1", "20", "0")`` because ``"9" > "2"`` lexicographically, wrongly
    reporting support on old numpy releases.
    """
    np_ver = _get_numpy_version()
    if not np_ver:
        return False
    return _version_as_ints(np_ver) > _version_as_ints(NUMPY_VERSION_TYPE_HINTS_SUPPORT)


def _version_as_ints(version: Tuple[str, ...]) -> Tuple[int, ...]:
    """Convert a string version tuple to ints using each component's leading
    digits (``0`` for non-numeric components such as ``"dev0"``)."""
    parts = []
    for component in version:
        digits = ""
        for char in component:
            if not char.isdigit():
                break
            digits += char
        parts.append(int(digits) if digits else 0)
    return tuple(parts)
|
||||
|
||||
|
||||
def _get_numpy_version() -> Tuple[str, str, str]:
|
||||
"""
|
||||
Return the numpy version number if numpy can be imported. Otherwise returns
|
||||
('0', '0', '0')
|
||||
"""
|
||||
try:
|
||||
import numpy # pylint: disable=import-outside-toplevel
|
||||
|
||||
return tuple(numpy.version.version.split("."))
|
||||
except ImportError:
|
||||
return ("0", "0", "0")
|
||||
|
||||
|
||||
def infer_numpy_member(src, node, context=None):
    """Infer *node* as the node extracted from the *src* stub snippet."""
    stub = extract_node(src)
    return stub.infer(context=context)
|
||||
|
||||
|
||||
def _is_a_numpy_module(node: Name) -> bool:
    """
    Return True when *node* refers to the numpy module.

    For example in::

        import numpy as np
        x = np.linspace(1, 2)

    the ``np`` Name node refers to the numpy module.

    :param node: node to test
    :return: True if the node is a representation of the numpy module.
    """
    nickname = node.name
    import_stmts = [
        stmt for stmt in node.lookup(nickname)[1] if isinstance(stmt, Import)
    ]
    # Either ``import numpy as <nickname>`` or a plain ``import numpy``.
    return any(
        ("numpy", nickname) in stmt.names or ("numpy", None) in stmt.names
        for stmt in import_stmts
    )
|
||||
|
||||
|
||||
def looks_like_numpy_member(member_name: str, node: NodeNG) -> bool:
    """
    Return True when *node* is a numpy member named *member_name*.

    Matches either an attribute access on the numpy module
    (``np.<member_name>``) or a bare name used inside a numpy module.

    :param member_name: name of the member
    :param node: node to test
    :return: True if the node is a member of numpy
    """
    if isinstance(node, Attribute):
        return (
            node.attrname == member_name
            and isinstance(node.expr, Name)
            and _is_a_numpy_module(node.expr)
        )
    if isinstance(node, Name):
        return node.name == member_name and node.root().name.startswith("numpy")
    return False
|
|
@ -1,75 +0,0 @@
|
|||
# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
|
||||
from astroid import parse
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def pkg_resources_transform():
    """Build a stub for ``pkg_resources`` exposing its main module-level API."""
    return parse(
        """
def require(*requirements):
    return pkg_resources.working_set.require(*requirements)

def run_script(requires, script_name):
    return pkg_resources.working_set.run_script(requires, script_name)

def iter_entry_points(group, name=None):
    return pkg_resources.working_set.iter_entry_points(group, name)

def resource_exists(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).has_resource(resource_name)

def resource_isdir(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).resource_isdir(
        resource_name)

def resource_filename(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).get_resource_filename(
        self, resource_name)

def resource_stream(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).get_resource_stream(
        self, resource_name)

def resource_string(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).get_resource_string(
        self, resource_name)

def resource_listdir(package_or_requirement, resource_name):
    return get_provider(package_or_requirement).resource_listdir(
        resource_name)

def extraction_error():
    pass

def get_cache_path(archive_name, names=()):
    extract_path = self.extraction_path or get_default_cache()
    target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
    return target_path

def postprocess(tempname, filename):
    pass

def set_extraction_path(path):
    pass

def cleanup_resources(force=False):
    pass

def get_distribution(dist):
    return Distribution(dist)

_namespace_packages = {}
"""
    )


register_module_extender(AstroidManager(), "pkg_resources", pkg_resources_transform)
|
|
@ -1,91 +0,0 @@
|
|||
# Copyright (c) 2014-2016, 2018, 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Jeff Quast <contact@jeffquast.com>
|
||||
# Copyright (c) 2014 Google, Inc.
|
||||
# Copyright (c) 2016 Florian Bruhin <me@the-compiler.org>
|
||||
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""Astroid hooks for pytest."""
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def pytest_transform():
    """Build a stub ``pytest`` module re-exporting the ``_pytest`` internals.

    Registered for both the ``pytest`` and legacy ``py.test`` module names.
    """
    return AstroidBuilder(AstroidManager()).string_build(
        """

try:
    import _pytest.mark
    import _pytest.recwarn
    import _pytest.runner
    import _pytest.python
    import _pytest.skipping
    import _pytest.assertion
except ImportError:
    pass
else:
    deprecated_call = _pytest.recwarn.deprecated_call
    warns = _pytest.recwarn.warns

    exit = _pytest.runner.exit
    fail = _pytest.runner.fail
    skip = _pytest.runner.skip
    importorskip = _pytest.runner.importorskip

    xfail = _pytest.skipping.xfail
    mark = _pytest.mark.MarkGenerator()
    raises = _pytest.python.raises

    # New in pytest 3.0
    try:
        approx = _pytest.python.approx
        register_assert_rewrite = _pytest.assertion.register_assert_rewrite
    except AttributeError:
        pass


    # Moved in pytest 3.0

    try:
        import _pytest.freeze_support
        freeze_includes = _pytest.freeze_support.freeze_includes
    except ImportError:
        try:
            import _pytest.genscript
            freeze_includes = _pytest.genscript.freeze_includes
        except ImportError:
            pass

    try:
        import _pytest.debugging
        set_trace = _pytest.debugging.pytestPDB().set_trace
    except ImportError:
        try:
            import _pytest.pdb
            set_trace = _pytest.pdb.pytestPDB().set_trace
        except ImportError:
            pass

    try:
        import _pytest.fixtures
        fixture = _pytest.fixtures.fixture
        yield_fixture = _pytest.fixtures.yield_fixture
    except ImportError:
        try:
            import _pytest.python
            fixture = _pytest.python.fixture
            yield_fixture = _pytest.python.yield_fixture
        except ImportError:
            pass
"""
    )


register_module_extender(AstroidManager(), "pytest", pytest_transform)
register_module_extender(AstroidManager(), "py.test", pytest_transform)
|
|
@ -1,88 +0,0 @@
|
|||
# Copyright (c) 2015-2016, 2018, 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2017 Roy Wright <roy@wright.org>
|
||||
# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
# Copyright (c) 2019 Antoine Boellinger <aboellinger@hotmail.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""Astroid hooks for the PyQT library."""
|
||||
|
||||
from astroid import nodes, parse
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def _looks_like_signal(node, signal_name="pyqtSignal"):
|
||||
if "__class__" in node.instance_attrs:
|
||||
try:
|
||||
cls = node.instance_attrs["__class__"][0]
|
||||
return cls.name == signal_name
|
||||
except AttributeError:
|
||||
# return False if the cls does not have a name attribute
|
||||
pass
|
||||
return False
|
||||
|
||||
|
||||
def transform_pyqt_signal(node):
    """Attach ``connect``/``disconnect``/``emit`` stubs to a pyqtSignal node.

    Mutates ``node.instance_attrs`` in place; returns None.
    """
    module = parse(
        """
    class pyqtSignal(object):
        def connect(self, slot, type=None, no_receiver_check=False):
            pass
        def disconnect(self, slot):
            pass
        def emit(self, *args):
            pass
    """
    )
    signal_cls = module["pyqtSignal"]
    node.instance_attrs["emit"] = signal_cls["emit"]
    node.instance_attrs["disconnect"] = signal_cls["disconnect"]
    node.instance_attrs["connect"] = signal_cls["connect"]
|
||||
|
||||
|
||||
def transform_pyside_signal(node):
    """Attach ``connect``/``disconnect``/``emit`` stubs to a PySide Signal class.

    Mutates ``node.instance_attrs`` in place; returns None.
    """
    module = parse(
        """
    class NotPySideSignal(object):
        def connect(self, receiver, type=None):
            pass
        def disconnect(self, receiver):
            pass
        def emit(self, *args):
            pass
    """
    )
    signal_cls = module["NotPySideSignal"]
    node.instance_attrs["connect"] = signal_cls["connect"]
    node.instance_attrs["disconnect"] = signal_cls["disconnect"]
    node.instance_attrs["emit"] = signal_cls["emit"]
|
||||
|
||||
|
||||
def pyqt4_qtcore_transform():
    """Build a stub for ``PyQt4.QtCore`` exposing ``SIGNAL`` and ``QObject.emit``."""
    return AstroidBuilder(AstroidManager()).string_build(
        """

def SIGNAL(signal_name): pass

class QObject(object):
    def emit(self, signal): pass
"""
    )
|
||||
|
||||
|
||||
# Hook the stubs in: extend PyQt4.QtCore, and rewrite signal declarations for
# both PyQt (pyqtSignal instances) and PySide/PySide2 (Signal classes).
register_module_extender(AstroidManager(), "PyQt4.QtCore", pyqt4_qtcore_transform)
AstroidManager().register_transform(
    nodes.FunctionDef, transform_pyqt_signal, _looks_like_signal
)
AstroidManager().register_transform(
    nodes.ClassDef,
    transform_pyside_signal,
    lambda node: node.qname() in {"PySide.QtCore.Signal", "PySide2.QtCore.Signal"},
)
|
|
@ -1,85 +0,0 @@
|
|||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
import random
|
||||
|
||||
from astroid import helpers
|
||||
from astroid.exceptions import UseInferenceDefault
|
||||
from astroid.inference_tip import inference_tip
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.nodes.node_classes import (
|
||||
Attribute,
|
||||
Call,
|
||||
Const,
|
||||
EvaluatedObject,
|
||||
List,
|
||||
Name,
|
||||
Set,
|
||||
Tuple,
|
||||
)
|
||||
|
||||
# Node types that random.sample() inference is willing to draw from.
ACCEPTED_ITERABLES_FOR_SAMPLE = (List, Set, Tuple)
|
||||
|
||||
|
||||
def _clone_node_with_lineno(node, parent, lineno):
    """Return a clone of *node* re-anchored at *lineno* under *parent*.

    ``EvaluatedObject`` wrappers are unwrapped first so the clone is built
    from the underlying syntactic node.
    """
    if isinstance(node, EvaluatedObject):
        node = node.original
    cls = node.__class__
    other_fields = node._other_fields
    _astroid_fields = node._astroid_fields
    # Data passed to the node constructor...
    init_params = {"lineno": lineno, "col_offset": node.col_offset, "parent": parent}
    # ...and the child nodes handed to ``postinit`` afterwards.
    postinit_params = {param: getattr(node, param) for param in _astroid_fields}
    if other_fields:
        init_params.update({param: getattr(node, param) for param in other_fields})
    new_node = cls(**init_params)
    if hasattr(node, "postinit") and _astroid_fields:
        new_node.postinit(**postinit_params)
    return new_node
|
||||
|
||||
|
||||
def infer_random_sample(node, context=None):
    """Infer a ``random.sample(population, k)`` call as a literal List node.

    Only handles a constant integer ``k`` and a literal list/set/tuple
    population; any other shape falls back to default inference by raising
    :class:`UseInferenceDefault`.
    """
    # random.sample takes exactly two positional arguments.
    if len(node.args) != 2:
        raise UseInferenceDefault

    length = node.args[1]
    if not isinstance(length, Const):
        raise UseInferenceDefault
    if not isinstance(length.value, int):
        raise UseInferenceDefault

    inferred_sequence = helpers.safe_infer(node.args[0], context=context)
    if not inferred_sequence:
        raise UseInferenceDefault

    if not isinstance(inferred_sequence, ACCEPTED_ITERABLES_FOR_SAMPLE):
        raise UseInferenceDefault

    if length.value > len(inferred_sequence.elts):
        # In this case, this will raise a ValueError
        raise UseInferenceDefault

    try:
        # Sample the child element nodes themselves; the concrete selection
        # is arbitrary but the result shape (a k-element list) is right.
        elts = random.sample(inferred_sequence.elts, length.value)
    except ValueError as exc:
        raise UseInferenceDefault from exc

    new_node = List(lineno=node.lineno, col_offset=node.col_offset, parent=node.scope())
    new_elts = [
        _clone_node_with_lineno(elt, parent=new_node, lineno=new_node.lineno)
        for elt in elts
    ]
    new_node.postinit(new_elts)
    return iter((new_node,))
|
||||
|
||||
|
||||
def _looks_like_random_sample(node):
    """Return True if *node* calls a function or method named ``sample``."""
    callee = node.func
    if isinstance(callee, Attribute):
        return callee.attrname == "sample"
    return isinstance(callee, Name) and callee.name == "sample"
|
||||
|
||||
|
||||
# Replace calls that look like random.sample() with the inferred List result.
AstroidManager().register_transform(
    Call, inference_tip(infer_random_sample), _looks_like_random_sample
)
|
|
@ -1,88 +0,0 @@
|
|||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
from typing import Optional
|
||||
|
||||
from astroid import context, inference_tip, nodes
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import extract_node, parse
|
||||
from astroid.const import PY37_PLUS, PY39_PLUS
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def _re_transform():
    """Return an AST assigning the ``re`` flag constants from ``sre_compile``."""
    # Since Python 3.6 there is the RegexFlag enum
    # where every entry will be exposed via updating globals()
    return parse(
        """
    import sre_compile
    ASCII = sre_compile.SRE_FLAG_ASCII
    IGNORECASE = sre_compile.SRE_FLAG_IGNORECASE
    LOCALE = sre_compile.SRE_FLAG_LOCALE
    UNICODE = sre_compile.SRE_FLAG_UNICODE
    MULTILINE = sre_compile.SRE_FLAG_MULTILINE
    DOTALL = sre_compile.SRE_FLAG_DOTALL
    VERBOSE = sre_compile.SRE_FLAG_VERBOSE
    A = ASCII
    I = IGNORECASE
    L = LOCALE
    U = UNICODE
    M = MULTILINE
    S = DOTALL
    X = VERBOSE
    TEMPLATE = sre_compile.SRE_FLAG_TEMPLATE
    T = TEMPLATE
    DEBUG = sre_compile.SRE_FLAG_DEBUG
    """
    )
|
||||
|
||||
|
||||
# Extend the parsed "re" module with the flag constants generated above.
register_module_extender(AstroidManager(), "re", _re_transform)
|
||||
|
||||
|
||||
CLASS_GETITEM_TEMPLATE = """
|
||||
@classmethod
|
||||
def __class_getitem__(cls, item):
|
||||
return cls
|
||||
"""
|
||||
|
||||
|
||||
def _looks_like_pattern_or_match(node: nodes.Call) -> bool:
    """Check for re.Pattern or re.Match call in stdlib.

    Match these patterns from stdlib/re.py
    ```py
    Pattern = type(...)
    Match = type(...)
    ```
    """
    # Restrict to the exact `Pattern = type(...)` / `Match = type(...)`
    # assignments inside the `re` module itself, so ordinary user calls
    # to type() are left untouched.
    return (
        node.root().name == "re"
        and isinstance(node.func, nodes.Name)
        and node.func.name == "type"
        and isinstance(node.parent, nodes.Assign)
        and len(node.parent.targets) == 1
        and isinstance(node.parent.targets[0], nodes.AssignName)
        and node.parent.targets[0].name in {"Pattern", "Match"}
    )
|
||||
|
||||
|
||||
def infer_pattern_match(
    node: nodes.Call, ctx: Optional[context.InferenceContext] = None
):
    """Infer re.Pattern and re.Match as classes. For PY39+ add `__class_getitem__`."""
    # Name the synthetic class after the assignment target (Pattern or Match).
    class_def = nodes.ClassDef(
        name=node.parent.targets[0].name,
        lineno=node.lineno,
        col_offset=node.col_offset,
        parent=node.parent,
    )
    if PY39_PLUS:
        # On 3.9+ these classes support subscripting (e.g. re.Pattern[str]).
        func_to_add = extract_node(CLASS_GETITEM_TEMPLATE)
        class_def.locals["__class_getitem__"] = [func_to_add]
    return iter([class_def])
|
||||
|
||||
|
||||
# Only applied on Python 3.7+.
if PY37_PLUS:
    AstroidManager().register_transform(
        nodes.Call, inference_tip(infer_pattern_match), _looks_like_pattern_or_match
    )
|
|
@ -1,75 +0,0 @@
|
|||
"""
|
||||
Astroid hooks for responses.
|
||||
|
||||
It might need to be manually updated from the public methods of
|
||||
:class:`responses.RequestsMock`.
|
||||
|
||||
See: https://github.com/getsentry/responses/blob/master/responses.py
|
||||
|
||||
"""
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import parse
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def responses_funcs():
    """Return an AST stubbing the public API of ``responses.RequestsMock``.

    The snippet below mirrors that class's public methods and may need to be
    manually updated when upstream ``responses`` changes.
    """
    return parse(
        """
    DELETE = "DELETE"
    GET = "GET"
    HEAD = "HEAD"
    OPTIONS = "OPTIONS"
    PATCH = "PATCH"
    POST = "POST"
    PUT = "PUT"
    response_callback = None

    def reset():
        return

    def add(
        method=None,  # method or ``Response``
        url=None,
        body="",
        adding_headers=None,
        *args,
        **kwargs
    ):
        return

    def add_passthru(prefix):
        return

    def remove(method_or_response=None, url=None):
        return

    def replace(method_or_response=None, url=None, body="", *args, **kwargs):
        return

    def add_callback(
        method, url, callback, match_querystring=False, content_type="text/plain"
    ):
        return

    calls = []

    def __enter__():
        return

    def __exit__(type, value, traceback):
        success = type is None
        return success

    def activate(func):
        return func

    def start():
        return

    def stop(allow_assert=True):
        return
    """
    )
|
||||
|
||||
|
||||
# Extend the parsed "responses" module with the stubbed API above.
register_module_extender(AstroidManager(), "responses", responses_funcs)
|
|
@ -1,94 +0,0 @@
|
|||
# Copyright (c) 2019 Valentin Valls <valentin.valls@esrf.fr>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
|
||||
"""Astroid hooks for scipy.signal module."""
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import parse
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def scipy_signal():
    """Return an AST stubbing functions from ``scipy.signal``.

    The snippet references ``numpy`` without importing it; that is fine
    because it is only parsed by astroid, never executed.
    """
    return parse(
        """
    # different functions defined in scipy.signals

    def barthann(M, sym=True):
        return numpy.ndarray([0])

    def bartlett(M, sym=True):
        return numpy.ndarray([0])

    def blackman(M, sym=True):
        return numpy.ndarray([0])

    def blackmanharris(M, sym=True):
        return numpy.ndarray([0])

    def bohman(M, sym=True):
        return numpy.ndarray([0])

    def boxcar(M, sym=True):
        return numpy.ndarray([0])

    def chebwin(M, at, sym=True):
        return numpy.ndarray([0])

    def cosine(M, sym=True):
        return numpy.ndarray([0])

    def exponential(M, center=None, tau=1.0, sym=True):
        return numpy.ndarray([0])

    def flattop(M, sym=True):
        return numpy.ndarray([0])

    def gaussian(M, std, sym=True):
        return numpy.ndarray([0])

    def general_gaussian(M, p, sig, sym=True):
        return numpy.ndarray([0])

    def hamming(M, sym=True):
        return numpy.ndarray([0])

    def hann(M, sym=True):
        return numpy.ndarray([0])

    def hanning(M, sym=True):
        return numpy.ndarray([0])

    def impulse2(system, X0=None, T=None, N=None, **kwargs):
        return numpy.ndarray([0]), numpy.ndarray([0])

    def kaiser(M, beta, sym=True):
        return numpy.ndarray([0])

    def nuttall(M, sym=True):
        return numpy.ndarray([0])

    def parzen(M, sym=True):
        return numpy.ndarray([0])

    def slepian(M, width, sym=True):
        return numpy.ndarray([0])

    def step2(system, X0=None, T=None, N=None, **kwargs):
        return numpy.ndarray([0]), numpy.ndarray([0])

    def triang(M, sym=True):
        return numpy.ndarray([0])

    def tukey(M, alpha=0.5, sym=True):
        return numpy.ndarray([0])
    """
    )
|
||||
|
||||
|
||||
# Extend the parsed "scipy.signal" module with the stubs above.
register_module_extender(AstroidManager(), "scipy.signal", scipy_signal)
|
|
@ -1,117 +0,0 @@
|
|||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
"""Astroid hooks for the signal library.
|
||||
|
||||
The signal module generates the 'Signals', 'Handlers' and 'Sigmasks' IntEnums
|
||||
dynamically using the IntEnum._convert() classmethod, which modifies the module
|
||||
globals. Astroid is unable to handle this type of code.
|
||||
|
||||
Without these hooks, the following are erroneously triggered by Pylint:
|
||||
* E1101: Module 'signal' has no 'Signals' member (no-member)
|
||||
* E1101: Module 'signal' has no 'Handlers' member (no-member)
|
||||
* E1101: Module 'signal' has no 'Sigmasks' member (no-member)
|
||||
|
||||
These enums are defined slightly differently depending on the user's operating
|
||||
system and platform. These platform differences should follow the current
|
||||
Python typeshed stdlib `signal.pyi` stub file, available at:
|
||||
|
||||
* https://github.com/python/typeshed/blob/master/stdlib/signal.pyi
|
||||
|
||||
Note that the enum.auto() values defined here for the Signals, Handlers and
|
||||
Sigmasks IntEnums are just dummy integer values, and do not correspond to the
|
||||
actual standard signal numbers - which may vary depending on the system.
|
||||
"""
|
||||
|
||||
|
||||
import sys
|
||||
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import parse
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def _signals_enums_transform():
    """Generates the AST for 'Signals', 'Handlers' and 'Sigmasks' IntEnums."""
    # The three snippets share the same base indentation so their
    # concatenation forms a single parseable source string.
    return parse(_signals_enum() + _handlers_enum() + _sigmasks_enum())
|
||||
|
||||
|
||||
def _signals_enum():
|
||||
"""Generates the source code for the Signals int enum."""
|
||||
signals_enum = """
|
||||
import enum
|
||||
class Signals(enum.IntEnum):
|
||||
SIGABRT = enum.auto()
|
||||
SIGEMT = enum.auto()
|
||||
SIGFPE = enum.auto()
|
||||
SIGILL = enum.auto()
|
||||
SIGINFO = enum.auto()
|
||||
SIGINT = enum.auto()
|
||||
SIGSEGV = enum.auto()
|
||||
SIGTERM = enum.auto()
|
||||
"""
|
||||
if sys.platform != "win32":
|
||||
signals_enum += """
|
||||
SIGALRM = enum.auto()
|
||||
SIGBUS = enum.auto()
|
||||
SIGCHLD = enum.auto()
|
||||
SIGCONT = enum.auto()
|
||||
SIGHUP = enum.auto()
|
||||
SIGIO = enum.auto()
|
||||
SIGIOT = enum.auto()
|
||||
SIGKILL = enum.auto()
|
||||
SIGPIPE = enum.auto()
|
||||
SIGPROF = enum.auto()
|
||||
SIGQUIT = enum.auto()
|
||||
SIGSTOP = enum.auto()
|
||||
SIGSYS = enum.auto()
|
||||
SIGTRAP = enum.auto()
|
||||
SIGTSTP = enum.auto()
|
||||
SIGTTIN = enum.auto()
|
||||
SIGTTOU = enum.auto()
|
||||
SIGURG = enum.auto()
|
||||
SIGUSR1 = enum.auto()
|
||||
SIGUSR2 = enum.auto()
|
||||
SIGVTALRM = enum.auto()
|
||||
SIGWINCH = enum.auto()
|
||||
SIGXCPU = enum.auto()
|
||||
SIGXFSZ = enum.auto()
|
||||
"""
|
||||
if sys.platform == "win32":
|
||||
signals_enum += """
|
||||
SIGBREAK = enum.auto()
|
||||
"""
|
||||
if sys.platform not in ("darwin", "win32"):
|
||||
signals_enum += """
|
||||
SIGCLD = enum.auto()
|
||||
SIGPOLL = enum.auto()
|
||||
SIGPWR = enum.auto()
|
||||
SIGRTMAX = enum.auto()
|
||||
SIGRTMIN = enum.auto()
|
||||
"""
|
||||
return signals_enum
|
||||
|
||||
|
||||
def _handlers_enum():
|
||||
"""Generates the source code for the Handlers int enum."""
|
||||
return """
|
||||
import enum
|
||||
class Handlers(enum.IntEnum):
|
||||
SIG_DFL = enum.auto()
|
||||
SIG_IGN = eunm.auto()
|
||||
"""
|
||||
|
||||
|
||||
def _sigmasks_enum():
|
||||
"""Generates the source code for the Sigmasks int enum."""
|
||||
if sys.platform != "win32":
|
||||
return """
|
||||
import enum
|
||||
class Sigmasks(enum.IntEnum):
|
||||
SIG_BLOCK = enum.auto()
|
||||
SIG_UNBLOCK = enum.auto()
|
||||
SIG_SETMASK = enum.auto()
|
||||
"""
|
||||
return ""
|
||||
|
||||
|
||||
# Extend the parsed "signal" module with the generated IntEnums.
register_module_extender(AstroidManager(), "signal", _signals_enums_transform)
|
|
@ -1,249 +0,0 @@
|
|||
# Copyright (c) 2014-2016, 2018, 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2020 Ram Rachum <ram@rachum.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Artsiom Kaval <lezeroq@gmail.com>
|
||||
# Copyright (c) 2021 Francis Charette Migneault <francis.charette.migneault@gmail.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
|
||||
"""Astroid hooks for six module."""
|
||||
|
||||
from textwrap import dedent
|
||||
|
||||
from astroid import nodes
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import AstroidBuilder
|
||||
from astroid.exceptions import (
|
||||
AstroidBuildingError,
|
||||
AttributeInferenceError,
|
||||
InferenceError,
|
||||
)
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
# Fully qualified names of the six helpers recognised by the transforms below.
SIX_ADD_METACLASS = "six.add_metaclass"
SIX_WITH_METACLASS = "six.with_metaclass"
|
||||
|
||||
|
||||
def default_predicate(line):
|
||||
return line.strip()
|
||||
|
||||
|
||||
def _indent(text, prefix, predicate=default_predicate):
|
||||
"""Adds 'prefix' to the beginning of selected lines in 'text'.
|
||||
|
||||
If 'predicate' is provided, 'prefix' will only be added to the lines
|
||||
where 'predicate(line)' is True. If 'predicate' is not provided,
|
||||
it will default to adding 'prefix' to all non-empty lines that do not
|
||||
consist solely of whitespace characters.
|
||||
"""
|
||||
|
||||
def prefixed_lines():
|
||||
for line in text.splitlines(True):
|
||||
yield prefix + line if predicate(line) else line
|
||||
|
||||
return "".join(prefixed_lines())
|
||||
|
||||
|
||||
_IMPORTS = """
|
||||
import _io
|
||||
cStringIO = _io.StringIO
|
||||
filter = filter
|
||||
from itertools import filterfalse
|
||||
input = input
|
||||
from sys import intern
|
||||
map = map
|
||||
range = range
|
||||
from importlib import reload
|
||||
reload_module = lambda module: reload(module)
|
||||
from functools import reduce
|
||||
from shlex import quote as shlex_quote
|
||||
from io import StringIO
|
||||
from collections import UserDict, UserList, UserString
|
||||
xrange = range
|
||||
zip = zip
|
||||
from itertools import zip_longest
|
||||
import builtins
|
||||
import configparser
|
||||
import copyreg
|
||||
import _dummy_thread
|
||||
import http.cookiejar as http_cookiejar
|
||||
import http.cookies as http_cookies
|
||||
import html.entities as html_entities
|
||||
import html.parser as html_parser
|
||||
import http.client as http_client
|
||||
import http.server as http_server
|
||||
BaseHTTPServer = CGIHTTPServer = SimpleHTTPServer = http.server
|
||||
import pickle as cPickle
|
||||
import queue
|
||||
import reprlib
|
||||
import socketserver
|
||||
import _thread
|
||||
import winreg
|
||||
import xmlrpc.server as xmlrpc_server
|
||||
import xmlrpc.client as xmlrpc_client
|
||||
import urllib.robotparser as urllib_robotparser
|
||||
import email.mime.multipart as email_mime_multipart
|
||||
import email.mime.nonmultipart as email_mime_nonmultipart
|
||||
import email.mime.text as email_mime_text
|
||||
import email.mime.base as email_mime_base
|
||||
import urllib.parse as urllib_parse
|
||||
import urllib.error as urllib_error
|
||||
import tkinter
|
||||
import tkinter.dialog as tkinter_dialog
|
||||
import tkinter.filedialog as tkinter_filedialog
|
||||
import tkinter.scrolledtext as tkinter_scrolledtext
|
||||
import tkinter.simpledialog as tkinder_simpledialog
|
||||
import tkinter.tix as tkinter_tix
|
||||
import tkinter.ttk as tkinter_ttk
|
||||
import tkinter.constants as tkinter_constants
|
||||
import tkinter.dnd as tkinter_dnd
|
||||
import tkinter.colorchooser as tkinter_colorchooser
|
||||
import tkinter.commondialog as tkinter_commondialog
|
||||
import tkinter.filedialog as tkinter_tkfiledialog
|
||||
import tkinter.font as tkinter_font
|
||||
import tkinter.messagebox as tkinter_messagebox
|
||||
import urllib
|
||||
import urllib.request as urllib_request
|
||||
import urllib.robotparser as urllib_robotparser
|
||||
import urllib.parse as urllib_parse
|
||||
import urllib.error as urllib_error
|
||||
"""
|
||||
|
||||
|
||||
def six_moves_transform():
    """Build a pseudo ``six.moves`` module from the ``_IMPORTS`` block."""
    # Indent every import by one level so it lands inside the Moves class body.
    moves_body = _indent(_IMPORTS, "    ")
    template = dedent(
        """
    class Moves(object):
    {}
    moves = Moves()
    """
    )
    module = AstroidBuilder(AstroidManager()).string_build(template.format(moves_body))
    module.name = "six.moves"
    return module
|
||||
|
||||
|
||||
def _six_fail_hook(modname):
    """Fix six.moves imports due to the dynamic nature of this
    class.

    Construct a pseudo-module which contains all the necessary imports
    for six

    :param modname: Name of failed module
    :type modname: str

    :return: An astroid module
    :rtype: nodes.Module
    """

    # "six.moves.<name>" means the caller wants an attribute of the
    # pseudo-module rather than the pseudo-module itself.
    attribute_of = modname != "six.moves" and modname.startswith("six.moves")
    if modname != "six.moves" and not attribute_of:
        raise AstroidBuildingError(modname=modname)
    module = AstroidBuilder(AstroidManager()).string_build(_IMPORTS)
    module.name = "six.moves"
    if attribute_of:
        # Facilitate import of submodules in Moves
        start_index = len(module.name)
        attribute = modname[start_index:].lstrip(".").replace(".", "_")
        try:
            import_attr = module.getattr(attribute)[0]
        except AttributeInferenceError as exc:
            raise AstroidBuildingError(modname=modname) from exc
        if isinstance(import_attr, nodes.Import):
            submodule = AstroidManager().ast_from_module_name(import_attr.names[0][0])
            return submodule
    # Let dummy submodule imports pass through
    # This will cause an Uninferable result, which is okay
    return module
|
||||
|
||||
|
||||
def _looks_like_decorated_with_six_add_metaclass(node):
    """Return True when *node* has a ``@six.add_metaclass(...)`` decorator."""
    if not node.decorators:
        return False
    return any(
        isinstance(decorator, nodes.Call)
        and decorator.func.as_string() == SIX_ADD_METACLASS
        for decorator in node.decorators.nodes
    )
|
||||
|
||||
|
||||
def transform_six_add_metaclass(node):  # pylint: disable=inconsistent-return-statements
    """Check if the given class node is decorated with *six.add_metaclass*

    If so, inject its argument as the metaclass of the underlying class.
    """
    if not node.decorators:
        return

    for decorator in node.decorators.nodes:
        if not isinstance(decorator, nodes.Call):
            continue

        try:
            # Infer the decorator to confirm it really is six.add_metaclass
            # and not an unrelated name clash.
            func = next(decorator.func.infer())
        except (InferenceError, StopIteration):
            continue
        if func.qname() == SIX_ADD_METACLASS and decorator.args:
            metaclass = decorator.args[0]
            node._metaclass = metaclass
            return node
    return
|
||||
|
||||
|
||||
def _looks_like_nested_from_six_with_metaclass(node):
    """Return True when the class's single base is a six.with_metaclass() call."""
    if len(node.bases) != 1:
        return False
    base = node.bases[0]
    if not isinstance(base, nodes.Call):
        return False
    try:
        if hasattr(base.func, "expr"):
            # format when explicit 'six.with_metaclass' is used
            mod = base.func.expr.name
            func = base.func.attrname
            func = f"{mod}.{func}"
        else:
            # format when 'with_metaclass' is used directly (local import from six)
            # check reference module to avoid 'with_metaclass' name clashes
            mod = base.parent.parent
            import_from = mod.locals["with_metaclass"][0]
            func = f"{import_from.modname}.{base.func.name}"
    except (AttributeError, KeyError, IndexError):
        # Any structural mismatch means this is not the pattern we handle.
        return False
    return func == SIX_WITH_METACLASS
|
||||
|
||||
|
||||
def transform_six_with_metaclass(node):
    """Check if the given class node is defined with *six.with_metaclass*

    If so, inject its argument as the metaclass of the underlying class.
    """
    with_metaclass_call = node.bases[0]
    node._metaclass = with_metaclass_call.args[0]
    return node
|
||||
|
||||
|
||||
register_module_extender(AstroidManager(), "six", six_moves_transform)
# requests vendors its own copy of six; give it the same treatment.
register_module_extender(
    AstroidManager(), "requests.packages.urllib3.packages.six", six_moves_transform
)
AstroidManager().register_failed_import_hook(_six_fail_hook)
AstroidManager().register_transform(
    nodes.ClassDef,
    transform_six_add_metaclass,
    _looks_like_decorated_with_six_add_metaclass,
)
AstroidManager().register_transform(
    nodes.ClassDef,
    transform_six_with_metaclass,
    _looks_like_nested_from_six_with_metaclass,
)
|
|
@ -1,35 +0,0 @@
|
|||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import parse
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def _session_transform():
    """Return an AST stubbing ``sessionmaker`` so calls to a factory instance
    are inferred as :class:`~sqlalchemy.orm.session.Session` objects.

    NOTE(review): the trailing ``return Session()`` sits at class-body level —
    that is invalid at runtime but tolerated here because the snippet is only
    parsed, never executed.
    """
    return parse(
        """
    from sqlalchemy.orm.session import Session

    class sessionmaker:
        def __init__(
            self,
            bind=None,
            class_=Session,
            autoflush=True,
            autocommit=False,
            expire_on_commit=True,
            info=None,
            **kw
        ):
            return

        def __call__(self, **local_kw):
            return Session()

        def configure(self, **new_kw):
            return

        return Session()
    """
    )
|
||||
|
||||
|
||||
# Extend the parsed "sqlalchemy.orm.session" module with the stub above.
register_module_extender(AstroidManager(), "sqlalchemy.orm.session", _session_transform)
|
|
@ -1,77 +0,0 @@
|
|||
# Copyright (c) 2016, 2018, 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2019 Benjamin Elven <25181435+S3ntinelX@users.noreply.github.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""Astroid hooks for the ssl library."""
|
||||
|
||||
from astroid import parse
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def ssl_transform():
    """Return an AST importing the ``_ssl`` C-extension names that the
    ``ssl`` module re-exports dynamically."""
    return parse(
        """
    from _ssl import OPENSSL_VERSION_NUMBER, OPENSSL_VERSION_INFO, OPENSSL_VERSION
    from _ssl import _SSLContext, MemoryBIO
    from _ssl import (
        SSLError, SSLZeroReturnError, SSLWantReadError, SSLWantWriteError,
        SSLSyscallError, SSLEOFError,
        )
    from _ssl import CERT_NONE, CERT_OPTIONAL, CERT_REQUIRED
    from _ssl import txt2obj as _txt2obj, nid2obj as _nid2obj
    from _ssl import RAND_status, RAND_add, RAND_bytes, RAND_pseudo_bytes
    try:
        from _ssl import RAND_egd
    except ImportError:
        # LibreSSL does not provide RAND_egd
        pass
    from _ssl import (OP_ALL, OP_CIPHER_SERVER_PREFERENCE,
                      OP_NO_COMPRESSION, OP_NO_SSLv2, OP_NO_SSLv3,
                      OP_NO_TLSv1, OP_NO_TLSv1_1, OP_NO_TLSv1_2,
                      OP_SINGLE_DH_USE, OP_SINGLE_ECDH_USE)

    from _ssl import (ALERT_DESCRIPTION_ACCESS_DENIED, ALERT_DESCRIPTION_BAD_CERTIFICATE,
                      ALERT_DESCRIPTION_BAD_CERTIFICATE_HASH_VALUE,
                      ALERT_DESCRIPTION_BAD_CERTIFICATE_STATUS_RESPONSE,
                      ALERT_DESCRIPTION_BAD_RECORD_MAC,
                      ALERT_DESCRIPTION_CERTIFICATE_EXPIRED,
                      ALERT_DESCRIPTION_CERTIFICATE_REVOKED,
                      ALERT_DESCRIPTION_CERTIFICATE_UNKNOWN,
                      ALERT_DESCRIPTION_CERTIFICATE_UNOBTAINABLE,
                      ALERT_DESCRIPTION_CLOSE_NOTIFY, ALERT_DESCRIPTION_DECODE_ERROR,
                      ALERT_DESCRIPTION_DECOMPRESSION_FAILURE,
                      ALERT_DESCRIPTION_DECRYPT_ERROR,
                      ALERT_DESCRIPTION_HANDSHAKE_FAILURE,
                      ALERT_DESCRIPTION_ILLEGAL_PARAMETER,
                      ALERT_DESCRIPTION_INSUFFICIENT_SECURITY,
                      ALERT_DESCRIPTION_INTERNAL_ERROR,
                      ALERT_DESCRIPTION_NO_RENEGOTIATION,
                      ALERT_DESCRIPTION_PROTOCOL_VERSION,
                      ALERT_DESCRIPTION_RECORD_OVERFLOW,
                      ALERT_DESCRIPTION_UNEXPECTED_MESSAGE,
                      ALERT_DESCRIPTION_UNKNOWN_CA,
                      ALERT_DESCRIPTION_UNKNOWN_PSK_IDENTITY,
                      ALERT_DESCRIPTION_UNRECOGNIZED_NAME,
                      ALERT_DESCRIPTION_UNSUPPORTED_CERTIFICATE,
                      ALERT_DESCRIPTION_UNSUPPORTED_EXTENSION,
                      ALERT_DESCRIPTION_USER_CANCELLED)
    from _ssl import (SSL_ERROR_EOF, SSL_ERROR_INVALID_ERROR_CODE, SSL_ERROR_SSL,
                      SSL_ERROR_SYSCALL, SSL_ERROR_WANT_CONNECT, SSL_ERROR_WANT_READ,
                      SSL_ERROR_WANT_WRITE, SSL_ERROR_WANT_X509_LOOKUP, SSL_ERROR_ZERO_RETURN)
    from _ssl import VERIFY_CRL_CHECK_CHAIN, VERIFY_CRL_CHECK_LEAF, VERIFY_DEFAULT, VERIFY_X509_STRICT
    from _ssl import HAS_SNI, HAS_ECDH, HAS_NPN, HAS_ALPN
    from _ssl import _OPENSSL_API_VERSION
    from _ssl import PROTOCOL_SSLv23, PROTOCOL_TLSv1, PROTOCOL_TLSv1_1, PROTOCOL_TLSv1_2
    from _ssl import PROTOCOL_TLS, PROTOCOL_TLS_CLIENT, PROTOCOL_TLS_SERVER
    """
    )
|
||||
|
||||
|
||||
# Extend the parsed "ssl" module with the _ssl re-exports above.
register_module_extender(AstroidManager(), "ssl", ssl_transform)
|
|
@ -1,136 +0,0 @@
|
|||
# Copyright (c) 2016-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2017 Hugo <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2018 Peter Talley <peterctalley@gmail.com>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2020 Peter Pentchev <roam@ringlet.net>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Daniël van Noord <13665637+DanielNoord@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Damien Baty <damien@damienbaty.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
import textwrap
|
||||
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import parse
|
||||
from astroid.const import PY37_PLUS, PY39_PLUS
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def _subprocess_transform():
    """Return an AST stubbing ``subprocess.check_output`` and ``Popen``.

    The stub is assembled from several string fragments whose relative
    indentation is significant, because the combined source is dedented
    and then parsed.
    """
    # Canned communicate() return value: a (stdout, stderr) bytes pair.
    communicate = (bytes("string", "ascii"), bytes("string", "ascii"))
    communicate_signature = "def communicate(self, input=None, timeout=None)"
    args = """\
        self, args, bufsize=0, executable=None, stdin=None, stdout=None, stderr=None,
        preexec_fn=None, close_fds=False, shell=False, cwd=None, env=None,
        universal_newlines=False, startupinfo=None, creationflags=0, restore_signals=True,
        start_new_session=False, pass_fds=(), *, encoding=None, errors=None"""
    if PY37_PLUS:
        # Python 3.7 added the `text` keyword argument.
        args += ", text=None"
    init = f"""
    def __init__({args}):
        pass"""
    wait_signature = "def wait(self, timeout=None)"
    ctx_manager = """
    def __enter__(self): return self
    def __exit__(self, *args): pass
    """
    py3_args = "args = []"

    if PY37_PLUS:
        check_output_signature = """
    check_output(
        args, *,
        stdin=None,
        stderr=None,
        shell=False,
        cwd=None,
        encoding=None,
        errors=None,
        universal_newlines=False,
        timeout=None,
        env=None,
        text=None,
        restore_signals=True,
        preexec_fn=None,
        pass_fds=(),
        input=None,
        bufsize=0,
        executable=None,
        close_fds=False,
        startupinfo=None,
        creationflags=0,
        start_new_session=False
    ):
    """.strip()
    else:
        check_output_signature = """
    check_output(
        args, *,
        stdin=None,
        stderr=None,
        shell=False,
        cwd=None,
        encoding=None,
        errors=None,
        universal_newlines=False,
        timeout=None,
        env=None,
        restore_signals=True,
        preexec_fn=None,
        pass_fds=(),
        input=None,
        bufsize=0,
        executable=None,
        close_fds=False,
        startupinfo=None,
        creationflags=0,
        start_new_session=False
    ):
    """.strip()

    code = textwrap.dedent(
        f"""
    def {check_output_signature}
        if universal_newlines:
            return ""
        return b""

    class Popen(object):
        returncode = pid = 0
        stdin = stdout = stderr = file()
        {py3_args}

        {communicate_signature}:
            return {communicate!r}
        {wait_signature}:
            return self.returncode
        def poll(self):
            return self.returncode
        def send_signal(self, signal):
            pass
        def terminate(self):
            pass
        def kill(self):
            pass
        {ctx_manager}
    """
    )
    if PY39_PLUS:
        # Popen became subscriptable (Popen[bytes]) on Python 3.9.
        code += """
    @classmethod
    def __class_getitem__(cls, item):
        pass
    """

    # Append __init__ last, re-indented by one level into the Popen body.
    init_lines = textwrap.dedent(init).splitlines()
    indented_init = "\n".join(" " * 4 + line for line in init_lines)
    code += indented_init
    return parse(code)
|
||||
|
||||
|
||||
# Extend the parsed "subprocess" module with the Popen/check_output stubs.
register_module_extender(AstroidManager(), "subprocess", _subprocess_transform)
|
|
@ -1,36 +0,0 @@
|
|||
# Copyright (c) 2016, 2018-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import parse
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def _thread_transform():
    """Return an AST stubbing ``threading.Lock`` as a context-manager class."""
    return parse(
        """
    class lock(object):
        def acquire(self, blocking=True, timeout=-1):
            return False
        def release(self):
            pass
        def __enter__(self):
            return True
        def __exit__(self, *args):
            pass
        def locked(self):
            return False

    def Lock():
        return lock()
    """
    )
|
||||
|
||||
|
||||
# Extend the "threading" module with the lock/Lock stubs defined above.
register_module_extender(AstroidManager(), "threading", _thread_transform)
|
|
@ -1,65 +0,0 @@
|
|||
"""
|
||||
Astroid hooks for type support.
|
||||
|
||||
Starting from python3.9, type object behaves as it had __class_getitem__ method.
|
||||
However it was not possible to simply add this method inside type's body, otherwise
|
||||
all types would also have this method. In this case it would have been possible
|
||||
to write str[int].
|
||||
Guido Van Rossum proposed a hack to handle this in the interpreter:
|
||||
https://github.com/python/cpython/blob/67e394562d67cbcd0ac8114e5439494e7645b8f5/Objects/abstract.c#L181-L184
|
||||
|
||||
This brain follows the same logic. It is no wise to add permanently the __class_getitem__ method
|
||||
to the type object. Instead we choose to add it only in the case of a subscript node
|
||||
which inside name node is type.
|
||||
Doing this type[int] is allowed whereas str[int] is not.
|
||||
|
||||
Thanks to Lukasz Langa for fruitful discussion.
|
||||
"""
|
||||
|
||||
from astroid import extract_node, inference_tip, nodes
|
||||
from astroid.const import PY39_PLUS
|
||||
from astroid.exceptions import UseInferenceDefault
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def _looks_like_type_subscript(node):
    """Return True when *node* is the name ``type`` used inside a subscript.

    :param node: node to check
    :type node: astroid.nodes.node_classes.NodeNG
    :return: true if the node is a Name node inside a type related subscript
    :rtype: bool
    """
    is_subscripted_name = isinstance(node, nodes.Name) and isinstance(
        node.parent, nodes.Subscript
    )
    return is_subscripted_name and node.name == "type"
|
||||
|
||||
|
||||
def infer_type_sub(node, context=None):
    """Infer a ``type[...]`` subscript as a subscriptable ``type`` stub.

    :param node: node to infer
    :type node: astroid.nodes.node_classes.NodeNG
    :param context: inference context
    :type context: astroid.context.InferenceContext
    :return: the inferred node
    :rtype: nodes.NodeNG
    """
    scope_node, _ = node.scope().lookup("type")
    # Only the genuine builtin `type` gets the fake __class_getitem__;
    # any shadowing definition falls back to default inference.
    names_builtin_type = (
        isinstance(scope_node, nodes.Module) and scope_node.qname() == "builtins"
    )
    if not names_builtin_type:
        raise UseInferenceDefault()
    class_src = """
    class type:
        def __class_getitem__(cls, key):
            return cls
    """
    stub = extract_node(class_src)
    return stub.infer(context=context)
|
||||
|
||||
|
||||
# type[...] only became valid syntax in Python 3.9, so the tip is 3.9+ only.
if PY39_PLUS:
    AstroidManager().register_transform(
        nodes.Name, inference_tip(infer_type_sub), _looks_like_type_subscript
    )
|
|
@ -1,438 +0,0 @@
|
|||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
# Copyright (c) 2017-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2017 David Euresti <github@euresti.com>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Daniël van Noord <13665637+DanielNoord@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Redoubts <Redoubts@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Tim Martin <tim@asymptotic.co.uk>
|
||||
# Copyright (c) 2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
|
||||
"""Astroid hooks for typing.py support."""
|
||||
import typing
|
||||
from functools import partial
|
||||
|
||||
from astroid import context, extract_node, inference_tip
|
||||
from astroid.const import PY37_PLUS, PY38_PLUS, PY39_PLUS
|
||||
from astroid.exceptions import (
|
||||
AttributeInferenceError,
|
||||
InferenceError,
|
||||
UseInferenceDefault,
|
||||
)
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.nodes.node_classes import (
|
||||
Assign,
|
||||
AssignName,
|
||||
Attribute,
|
||||
Call,
|
||||
Const,
|
||||
Name,
|
||||
NodeNG,
|
||||
Subscript,
|
||||
Tuple,
|
||||
)
|
||||
from astroid.nodes.scoped_nodes import ClassDef, FunctionDef
|
||||
from astroid.util import Uninferable
|
||||
|
||||
# Base names under which a class may subclass typing.NamedTuple.
TYPING_NAMEDTUPLE_BASENAMES = {"NamedTuple", "typing.NamedTuple"}
# Unqualified and fully-qualified callables that create new type variables.
TYPING_TYPEVARS = {"TypeVar", "NewType"}
TYPING_TYPEVARS_QUALIFIED = {"typing.TypeVar", "typing.NewType"}
# Source template for faking a subscriptable class; {0} is the class name.
TYPING_TYPE_TEMPLATE = """
class Meta(type):
    def __getitem__(self, item):
        return self

    @property
    def __args__(self):
        return ()

class {0}(metaclass=Meta):
    pass
"""
# All public names exported by the typing module (empty if __all__ is absent).
TYPING_MEMBERS = set(getattr(typing, "__all__", []))

# Qualified names of typing generic aliases; on 3.7+ these are handled by the
# _alias-based inference tips below rather than by infer_typing_attr.
TYPING_ALIAS = frozenset(
    (
        "typing.Hashable",
        "typing.Awaitable",
        "typing.Coroutine",
        "typing.AsyncIterable",
        "typing.AsyncIterator",
        "typing.Iterable",
        "typing.Iterator",
        "typing.Reversible",
        "typing.Sized",
        "typing.Container",
        "typing.Collection",
        "typing.Callable",
        "typing.AbstractSet",
        "typing.MutableSet",
        "typing.Mapping",
        "typing.MutableMapping",
        "typing.Sequence",
        "typing.MutableSequence",
        "typing.ByteString",
        "typing.Tuple",
        "typing.List",
        "typing.Deque",
        "typing.Set",
        "typing.FrozenSet",
        "typing.MappingView",
        "typing.KeysView",
        "typing.ItemsView",
        "typing.ValuesView",
        "typing.ContextManager",
        "typing.AsyncContextManager",
        "typing.Dict",
        "typing.DefaultDict",
        "typing.OrderedDict",
        "typing.Counter",
        "typing.ChainMap",
        "typing.Generator",
        "typing.AsyncGenerator",
        "typing.Type",
        "typing.Pattern",
        "typing.Match",
    )
)

# Source template of the __class_getitem__ classmethod injected into fakes.
CLASS_GETITEM_TEMPLATE = """
@classmethod
def __class_getitem__(cls, item):
    return cls
"""
|
||||
|
||||
|
||||
def looks_like_typing_typevar_or_newtype(node):
    """Return True when *node* calls something named TypeVar or NewType."""
    callee = node.func
    if isinstance(callee, Attribute):
        called_name = callee.attrname
    elif isinstance(callee, Name):
        called_name = callee.name
    else:
        return False
    return called_name in TYPING_TYPEVARS
|
||||
|
||||
|
||||
def infer_typing_typevar_or_newtype(node, context_itton=None):
    """Infer a typing.TypeVar(...) or typing.NewType(...) call"""
    try:
        called = next(node.func.infer(context=context_itton))
    except (InferenceError, StopIteration) as exc:
        raise UseInferenceDefault from exc

    # Bail out unless this really is typing.TypeVar/NewType with arguments.
    if called.qname() not in TYPING_TYPEVARS_QUALIFIED or not node.args:
        raise UseInferenceDefault

    typename = node.args[0].as_string().strip("'")
    stub = extract_node(TYPING_TYPE_TEMPLATE.format(typename))
    return stub.infer(context=context_itton)
|
||||
|
||||
|
||||
def _looks_like_typing_subscript(node):
    """Try to figure out if a Subscript node *might* be a typing-related subscript"""
    # Unwrap nested subscripts iteratively instead of recursing.
    while isinstance(node, Subscript):
        node = node.value
    if isinstance(node, Name):
        return node.name in TYPING_MEMBERS
    if isinstance(node, Attribute):
        return node.attrname in TYPING_MEMBERS
    return False
|
||||
|
||||
|
||||
def infer_typing_attr(
    node: Subscript, ctx: typing.Optional[context.InferenceContext] = None
) -> typing.Iterator[ClassDef]:
    """Infer a typing.X[...] subscript.

    :param node: the Subscript node being inferred
    :param ctx: inference context
    :return: iterator over the inferred class definition(s)
    :raises UseInferenceDefault: for non-typing values and for 3.7+ typing
        aliases, which are handled by infer_typing_alias instead.
    """
    try:
        value = next(node.value.infer())
    except (InferenceError, StopIteration) as exc:
        raise UseInferenceDefault from exc

    # NOTE: `and` binds tighter than `or`, so this reads:
    # (not a typing name) OR (PY37+ AND a typing alias handled elsewhere).
    if (
        not value.qname().startswith("typing.")
        or PY37_PLUS
        and value.qname() in TYPING_ALIAS
    ):
        # If typing subscript belongs to an alias
        # (PY37+) handle it separately.
        raise UseInferenceDefault

    if (
        PY37_PLUS
        and isinstance(value, ClassDef)
        and value.qname()
        in {"typing.Generic", "typing.Annotated", "typing_extensions.Annotated"}
    ):
        # With PY37+ typing.Generic and typing.Annotated (PY39) are subscriptable
        # through __class_getitem__. Since astroid can't easily
        # infer the native methods, replace them for an easy inference tip
        func_to_add = extract_node(CLASS_GETITEM_TEMPLATE)
        value.locals["__class_getitem__"] = [func_to_add]
        if (
            isinstance(node.parent, ClassDef)
            and node in node.parent.bases
            and getattr(node.parent, "__cache", None)
        ):
            # node.parent.slots is evaluated and cached before the inference tip
            # is first applied. Remove the last result to allow a recalculation of slots
            cache = node.parent.__cache  # type: ignore[attr-defined] # Unrecognized getattr
            if cache.get(node.parent.slots) is not None:
                # Evict the stale slots() result so it is recomputed with the
                # freshly injected __class_getitem__ in place.
                del cache[node.parent.slots]
        return iter([value])

    # Otherwise fake a subscriptable class named after the typing member.
    node = extract_node(TYPING_TYPE_TEMPLATE.format(value.qname().split(".")[-1]))
    return node.infer(context=ctx)
|
||||
|
||||
|
||||
def _looks_like_typedDict(  # pylint: disable=invalid-name
    node: typing.Union[FunctionDef, ClassDef],
) -> bool:
    """Check if node is TypedDict FunctionDef."""
    typed_dict_names = {"typing.TypedDict", "typing_extensions.TypedDict"}
    return node.qname() in typed_dict_names
|
||||
|
||||
|
||||
def infer_old_typedDict(  # pylint: disable=invalid-name
    node: ClassDef, ctx: typing.Optional[context.InferenceContext] = None
) -> typing.Iterator[ClassDef]:
    """Make the TypedDict ClassDef callable by giving it a dict ``__call__``."""
    node.locals["__call__"] = [extract_node("dict")]
    return iter([node])
|
||||
|
||||
|
||||
def infer_typedDict(  # pylint: disable=invalid-name
    node: FunctionDef, ctx: typing.Optional[context.InferenceContext] = None
) -> typing.Iterator[ClassDef]:
    """Replace TypedDict FunctionDef with ClassDef."""
    fake_class = ClassDef(
        name="TypedDict",
        lineno=node.lineno,
        col_offset=node.col_offset,
        parent=node.parent,
    )
    # A dict base plus a dict-valued __call__ makes the fake both
    # subclassable and callable, mirroring runtime TypedDict usage.
    fake_class.postinit(bases=[extract_node("dict")], body=[], decorators=None)
    fake_class.locals["__call__"] = [extract_node("dict")]
    return iter([fake_class])
|
||||
|
||||
|
||||
def _looks_like_typing_alias(node: Call) -> bool:
    """
    Returns True if the node corresponds to a call to _alias function.
    For example :

    MutableSet = _alias(collections.abc.MutableSet, T)

    :param node: call node
    """
    if not isinstance(node.func, Name) or node.func.name != "_alias":
        return False
    # _alias function works also for builtins object such as list and dict
    return isinstance(node.args[0], (Attribute, Name))
|
||||
|
||||
|
||||
def _forbid_class_getitem_access(node: ClassDef) -> None:
    """
    Disable the access to __class_getitem__ method for the node in parameters

    :param node: the ClassDef whose ``getattr`` is monkey-patched in place
    """

    def full_raiser(origin_func, attr, *args, **kwargs):
        """
        Raises an AttributeInferenceError in case of access to __class_getitem__ method.
        Otherwise just call origin_func.
        """
        if attr == "__class_getitem__":
            raise AttributeInferenceError("__class_getitem__ access is not allowed")
        return origin_func(attr, *args, **kwargs)

    try:
        # Probe first: only patch nodes that actually expose the method.
        node.getattr("__class_getitem__")
        # If we are here, then we are sure to modify object that do have __class_getitem__ method (which origin is one the
        # protocol defined in collections module) whereas the typing module consider it should not
        # We do not want __class_getitem__ to be found in the classdef
        partial_raiser = partial(full_raiser, node.getattr)
        # Replace the bound getattr with the guarding wrapper on this instance.
        node.getattr = partial_raiser
    except AttributeInferenceError:
        pass
|
||||
|
||||
|
||||
def infer_typing_alias(
    node: Call, ctx: typing.Optional[context.InferenceContext] = None
) -> typing.Iterator[ClassDef]:
    """
    Infers the call to _alias function
    Insert ClassDef, with same name as aliased class,
    in mro to simulate _GenericAlias.

    :param node: call node
    :param ctx: inference context
    :raises UseInferenceDefault: if the call is not a simple single-target
        assignment of the form ``Name = _alias(...)``
    :raises InferenceError: if the aliased class cannot be inferred
    """
    if (
        not isinstance(node.parent, Assign)
        or not len(node.parent.targets) == 1
        or not isinstance(node.parent.targets[0], AssignName)
    ):
        raise UseInferenceDefault
    try:
        res = next(node.args[0].infer(context=ctx))
    except StopIteration as e:
        # Bug fix: pass the local inference context `ctx` — the previous code
        # passed the imported `astroid.context` *module* here by mistake.
        raise InferenceError(node=node.args[0], context=ctx) from e

    assign_name = node.parent.targets[0]

    class_def = ClassDef(
        name=assign_name.name,
        lineno=assign_name.lineno,
        col_offset=assign_name.col_offset,
        parent=node.parent,
    )
    if res != Uninferable and isinstance(res, ClassDef):
        # Only add `res` as base if it's a `ClassDef`
        # This isn't the case for `typing.Pattern` and `typing.Match`
        class_def.postinit(bases=[res], body=[], decorators=None)

    maybe_type_var = node.args[1]
    if (
        not PY39_PLUS
        and not (isinstance(maybe_type_var, Tuple) and not maybe_type_var.elts)
        or PY39_PLUS
        and isinstance(maybe_type_var, Const)
        and maybe_type_var.value > 0
    ):
        # If typing alias is subscriptable, add `__class_getitem__` to ClassDef
        func_to_add = extract_node(CLASS_GETITEM_TEMPLATE)
        class_def.locals["__class_getitem__"] = [func_to_add]
    else:
        # If not, make sure that `__class_getitem__` access is forbidden.
        # This is an issue in cases where the aliased class implements it,
        # but the typing alias isn't subscriptable. E.g., `typing.ByteString` for PY39+
        _forbid_class_getitem_access(class_def)
    return iter([class_def])
|
||||
|
||||
|
||||
def _looks_like_special_alias(node: Call) -> bool:
    """Return True if call is for Tuple or Callable alias.

    In PY37 and PY38 the call is to '_VariadicGenericAlias' with 'tuple' as
    first argument. In PY39+ it is replaced by a call to '_TupleType'.

    PY37: Tuple = _VariadicGenericAlias(tuple, (), inst=False, special=True)
    PY39: Tuple = _TupleType(tuple, -1, inst=False, name='Tuple')


    PY37: Callable = _VariadicGenericAlias(collections.abc.Callable, (), special=True)
    PY39: Callable = _CallableType(collections.abc.Callable, 2)
    """
    if not isinstance(node.func, Name):
        return False
    if not PY39_PLUS:
        if node.func.name != "_VariadicGenericAlias":
            return False
        first_arg = node.args[0]
        return (isinstance(first_arg, Name) and first_arg.name == "tuple") or (
            isinstance(first_arg, Attribute)
            and first_arg.as_string() == "collections.abc.Callable"
        )
    if node.func.name == "_TupleType":
        first_arg = node.args[0]
        return isinstance(first_arg, Name) and first_arg.name == "tuple"
    if node.func.name == "_CallableType":
        first_arg = node.args[0]
        return (
            isinstance(first_arg, Attribute)
            and first_arg.as_string() == "collections.abc.Callable"
        )
    return False
|
||||
|
||||
|
||||
def infer_special_alias(
    node: Call, ctx: typing.Optional[context.InferenceContext] = None
) -> typing.Iterator[ClassDef]:
    """Infer call to tuple alias as new subscriptable class typing.Tuple.

    :param node: call node creating the special alias
    :param ctx: inference context
    :raises UseInferenceDefault: if the call is not a simple single-target
        assignment
    :raises InferenceError: if the aliased class cannot be inferred
    """
    if not (
        isinstance(node.parent, Assign)
        and len(node.parent.targets) == 1
        and isinstance(node.parent.targets[0], AssignName)
    ):
        raise UseInferenceDefault
    try:
        res = next(node.args[0].infer(context=ctx))
    except StopIteration as e:
        # Bug fix: pass the local inference context `ctx` — the previous code
        # passed the imported `astroid.context` *module* here by mistake.
        raise InferenceError(node=node.args[0], context=ctx) from e

    assign_name = node.parent.targets[0]
    class_def = ClassDef(
        name=assign_name.name,
        parent=node.parent,
    )
    class_def.postinit(bases=[res], body=[], decorators=None)
    # Tuple/Callable are always subscriptable, so always add __class_getitem__.
    func_to_add = extract_node(CLASS_GETITEM_TEMPLATE)
    class_def.locals["__class_getitem__"] = [func_to_add]
    return iter([class_def])
|
||||
|
||||
|
||||
def _looks_like_typing_cast(node: Call) -> bool:
    """Return True when *node* is a call to something named ``cast``."""
    if not isinstance(node, Call):
        return False
    callee = node.func
    if isinstance(callee, Name):
        return callee.name == "cast"
    return isinstance(callee, Attribute) and callee.attrname == "cast"
|
||||
|
||||
|
||||
def infer_typing_cast(
    node: Call, ctx: typing.Optional[context.InferenceContext] = None
) -> typing.Iterator[NodeNG]:
    """Infer call to cast() returning same type as casted-from var"""
    if not isinstance(node.func, (Name, Attribute)):
        raise UseInferenceDefault

    try:
        cast_func = next(node.func.infer(context=ctx))
    except (InferenceError, StopIteration) as exc:
        raise UseInferenceDefault from exc
    # Only a genuine two-argument typing.cast(...) call is handled here.
    is_typing_cast = (
        isinstance(cast_func, FunctionDef)
        and cast_func.qname() == "typing.cast"
        and len(node.args) == 2
    )
    if not is_typing_cast:
        raise UseInferenceDefault

    # cast() is an identity at runtime: the value keeps its original type.
    return node.args[1].infer(context=ctx)
|
||||
|
||||
|
||||
# Register the typing-related inference tips with the default manager.
AstroidManager().register_transform(
    Call,
    inference_tip(infer_typing_typevar_or_newtype),
    looks_like_typing_typevar_or_newtype,
)
AstroidManager().register_transform(
    Subscript, inference_tip(infer_typing_attr), _looks_like_typing_subscript
)
AstroidManager().register_transform(
    Call, inference_tip(infer_typing_cast), _looks_like_typing_cast
)

# TypedDict is registered against FunctionDef on 3.9+ but against ClassDef
# on 3.8, matching how the stdlib declares it on each version.
if PY39_PLUS:
    AstroidManager().register_transform(
        FunctionDef, inference_tip(infer_typedDict), _looks_like_typedDict
    )
elif PY38_PLUS:
    AstroidManager().register_transform(
        ClassDef, inference_tip(infer_old_typedDict), _looks_like_typedDict
    )

# The _alias-based tips only apply from 3.7 onwards.
if PY37_PLUS:
    AstroidManager().register_transform(
        Call, inference_tip(infer_typing_alias), _looks_like_typing_alias
    )
    AstroidManager().register_transform(
        Call, inference_tip(infer_special_alias), _looks_like_special_alias
    )
|
|
@ -1,27 +0,0 @@
|
|||
"""Astroid hooks for unittest module"""
|
||||
from astroid.brain.helpers import register_module_extender
|
||||
from astroid.builder import parse
|
||||
from astroid.const import PY38_PLUS
|
||||
from astroid.manager import AstroidManager
|
||||
|
||||
|
||||
def IsolatedAsyncioTestCaseImport():
    """
    In the unittest package, the IsolatedAsyncioTestCase class is imported lazily, i.e only
    when the __getattr__ method of the unittest module is called with 'IsolatedAsyncioTestCase' as
    argument. Thus the IsolatedAsyncioTestCase is not imported statically (during import time).
    This function mocks a classical static import of the IsolatedAsyncioTestCase.

    (see https://github.com/PyCQA/pylint/issues/4060)
    """
    import_stub = """
    from .async_case import IsolatedAsyncioTestCase
    """
    return parse(import_stub)
|
||||
|
||||
|
||||
# IsolatedAsyncioTestCase only exists from Python 3.8 onwards.
if PY38_PLUS:
    register_module_extender(
        AstroidManager(), "unittest", IsolatedAsyncioTestCaseImport
    )
|
|
@ -1,22 +0,0 @@
|
|||
# Copyright (c) 2017-2018, 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""Astroid hooks for the UUID module."""
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.nodes.node_classes import Const
|
||||
from astroid.nodes.scoped_nodes import ClassDef
|
||||
|
||||
|
||||
def _patch_uuid_class(node):
    """Expose a constant ``int`` attribute on the uuid.UUID ClassDef."""
    # The .int member is patched using __dict__
    zero_const = Const(0, parent=node)
    node.locals["int"] = [zero_const]
|
||||
|
||||
|
||||
# Patch only the uuid.UUID class definition itself.
AstroidManager().register_transform(
    ClassDef, _patch_uuid_class, lambda node: node.qname() == "uuid.UUID"
)
|
|
@ -1,13 +0,0 @@
|
|||
from astroid.nodes.scoped_nodes import Module
|
||||
|
||||
|
||||
def register_module_extender(manager, module_name, get_extension_mod):
    """Register a transform that merges an extension module's locals into
    the module named *module_name* whenever it is built.

    *get_extension_mod* is a zero-argument callable returning the parsed
    extension module; it is invoked lazily, once per transformed module.
    """

    def _extend(module_node):
        extension = get_extension_mod()
        for local_name, definitions in extension.locals.items():
            module_node.locals[local_name] = definitions
            # Re-parent copied definitions so they report the target module.
            for definition in definitions:
                if definition.parent is extension:
                    definition.parent = module_node

    def _is_target(module_node):
        return module_node.name == module_name

    manager.register_transform(Module, _extend, _is_target)
|
|
@ -1,471 +0,0 @@
|
|||
# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2013 Phil Schaf <flying-sheep@web.de>
|
||||
# Copyright (c) 2014-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014-2015 Google, Inc.
|
||||
# Copyright (c) 2014 Alexander Presnyakov <flagist0@gmail.com>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
|
||||
# Copyright (c) 2017 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Tushar Sadhwani <86737547+tushar-deepsource@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Daniël van Noord <13665637+DanielNoord@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Gregory P. Smith <greg@krypto.org>
|
||||
# Copyright (c) 2021 Kian Meng, Ang <kianmeng.ang@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Andrew Haigh <hello@nelf.in>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""The AstroidBuilder makes astroid from living object and / or from _ast
|
||||
|
||||
The builder is not thread safe and can't be used to parse different sources
|
||||
at the same time.
|
||||
"""
|
||||
import os
|
||||
import textwrap
|
||||
import types
|
||||
from tokenize import detect_encoding
|
||||
from typing import List, Optional, Union
|
||||
|
||||
from astroid import bases, modutils, nodes, raw_building, rebuilder, util
|
||||
from astroid._ast import get_parser_module
|
||||
from astroid.exceptions import AstroidBuildingError, AstroidSyntaxError, InferenceError
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.nodes.node_classes import NodeNG
|
||||
|
||||
# Lazy import — presumably avoids a circular import with astroid.objects;
# confirm against astroid.util.lazy_import.
objects = util.lazy_import("objects")

# The name of the transient function that is used to
# wrap expressions to be extracted when calling
# extract_node.
_TRANSIENT_FUNCTION = "__"

# The comment used to select a statement to be extracted
# when calling extract_node.
_STATEMENT_SELECTOR = "#@"
# Error message used for misplaced type-annotation comments.
MISPLACED_TYPE_ANNOTATION_ERROR = "misplaced type annotation"
|
||||
|
||||
|
||||
def open_source_file(filename):
    """Open *filename* honouring its declared source encoding.

    Returns a ``(stream, encoding, data)`` triple; the caller owns the
    returned text stream and must close it.
    """
    # pylint: disable=consider-using-with
    # First pass: sniff the PEP 263 coding declaration from the raw bytes.
    with open(filename, "rb") as raw_stream:
        encoding = detect_encoding(raw_stream.readline)[0]
    # Second pass: reopen as text with the detected encoding; the open
    # stream is intentionally returned for the caller to manage.
    text_stream = open(filename, newline=None, encoding=encoding)
    contents = text_stream.read()
    return text_stream, encoding, contents
|
||||
|
||||
|
||||
def _can_assign_attr(node, attrname):
|
||||
try:
|
||||
slots = node.slots()
|
||||
except NotImplementedError:
|
||||
pass
|
||||
else:
|
||||
if slots and attrname not in {slot.value for slot in slots}:
|
||||
return False
|
||||
return node.qname() != "builtins.object"
|
||||
|
||||
|
||||
class AstroidBuilder(raw_building.InspectBuilder):
    """Class for building an astroid tree from source code or from a live module.

    The param *manager* specifies the manager class which should be used.
    If no manager is given, then the default one will be used. The
    param *apply_transforms* determines if the transforms should be
    applied after the tree was built from source or from a live object,
    by default being True.
    """

    # pylint: disable=redefined-outer-name
    def __init__(self, manager=None, apply_transforms=True):
        super().__init__(manager)
        self._apply_transforms = apply_transforms

    def module_build(
        self, module: types.ModuleType, modname: Optional[str] = None
    ) -> nodes.Module:
        """Build an astroid from a living module instance."""
        node = None
        path = getattr(module, "__file__", None)
        loader = getattr(module, "__loader__", None)
        # Prefer the loader to get the source rather than assuming we have a
        # filesystem to read the source file from ourselves.
        if loader:
            modname = modname or module.__name__
            source = loader.get_source(modname)
            if source:
                node = self.string_build(source, modname, path=path)
        if node is None and path is not None:
            # Fall back to the .py file next to the compiled module, if any.
            path_, ext = os.path.splitext(modutils._path_from_filename(path))
            if ext in {".py", ".pyc", ".pyo"} and os.path.exists(path_ + ".py"):
                node = self.file_build(path_ + ".py", modname)
        if node is None:
            # this is a built-in module
            # get a partial representation by introspection
            node = self.inspect_build(module, modname=modname, path=path)
            if self._apply_transforms:
                # We have to handle transformation by ourselves since the
                # rebuilder isn't called for builtin nodes
                node = self._manager.visit_transforms(node)
        return node

    def file_build(self, path, modname=None):
        """Build astroid from a source code file (i.e. from an ast)

        *path* is expected to be a python source file
        """
        try:
            stream, encoding, data = open_source_file(path)
        except OSError as exc:
            raise AstroidBuildingError(
                "Unable to load file {path}:\n{error}",
                modname=modname,
                path=path,
                error=exc,
            ) from exc
        except (SyntaxError, LookupError) as exc:
            raise AstroidSyntaxError(
                "Python 3 encoding specification error or unknown encoding:\n"
                "{error}",
                modname=modname,
                path=path,
                error=exc,
            ) from exc
        except UnicodeError as exc:  # wrong encoding
            # detect_encoding returns utf-8 if no encoding specified
            raise AstroidBuildingError(
                "Wrong or no encoding specified for {filename}.", filename=path
            ) from exc
        with stream:
            # get module name if necessary
            if modname is None:
                try:
                    modname = ".".join(modutils.modpath_from_file(path))
                except ImportError:
                    modname = os.path.splitext(os.path.basename(path))[0]
            # build astroid representation
            module = self._data_build(data, modname, path)
            return self._post_build(module, encoding)

    def string_build(self, data, modname="", path=None):
        """Build astroid from source code string."""
        module = self._data_build(data, modname, path)
        module.file_bytes = data.encode("utf-8")
        return self._post_build(module, "utf-8")

    def _post_build(self, module, encoding):
        """Handles encoding and delayed nodes after a module has been built"""
        module.file_encoding = encoding
        self._manager.cache_module(module)
        # post tree building steps after we stored the module in the cache:
        for from_node in module._import_from_nodes:
            if from_node.modname == "__future__":
                for symbol, _ in from_node.names:
                    module.future_imports.add(symbol)
            self.add_from_names_to_locals(from_node)
        # handle delayed assattr nodes
        for delayed in module._delayed_assattr:
            self.delayed_assattr(delayed)

        # Visit the transforms
        if self._apply_transforms:
            module = self._manager.visit_transforms(module)
        return module

    def _data_build(self, data, modname, path):
        """Build tree node from data and add some information"""
        try:
            node, parser_module = _parse_string(data, type_comments=True)
        except (TypeError, ValueError, SyntaxError) as exc:
            raise AstroidSyntaxError(
                "Parsing Python code failed:\n{error}",
                source=data,
                modname=modname,
                path=path,
                error=exc,
            ) from exc

        if path is not None:
            node_file = os.path.abspath(path)
        else:
            node_file = "<?>"
        if modname.endswith(".__init__"):
            # "pkg.__init__" is really the package "pkg" itself.
            modname = modname[:-9]
            package = True
        else:
            package = (
                path is not None
                and os.path.splitext(os.path.basename(path))[0] == "__init__"
            )
        builder = rebuilder.TreeRebuilder(self._manager, parser_module)
        module = builder.visit_module(node, modname, node_file, package)
        # Keep the rebuilder's bookkeeping for _post_build's delayed passes.
        module._import_from_nodes = builder._import_from_nodes
        module._delayed_assattr = builder._delayed_assattr
        return module

    def add_from_names_to_locals(self, node):
        """Store imported names to the locals

        Resort the locals if coming from a delayed node
        """

        def _key_func(node):
            return node.fromlineno

        def sort_locals(my_list):
            my_list.sort(key=_key_func)

        for (name, asname) in node.names:
            if name == "*":
                # Wildcard import: bind every public name of the imported module.
                try:
                    imported = node.do_import_module()
                except AstroidBuildingError:
                    continue
                for name in imported.public_names():
                    node.parent.set_local(name, node)
                    sort_locals(node.parent.scope().locals[name])
            else:
                node.parent.set_local(asname or name, node)
                sort_locals(node.parent.scope().locals[asname or name])

    def delayed_assattr(self, node):
        """Visit a AssAttr node

        This adds name to locals and handle members definition.
        """
        try:
            frame = node.frame(future=True)
            for inferred in node.expr.infer():
                if inferred is util.Uninferable:
                    continue
                try:
                    # Exact-class checks (not isinstance) so Const/Tuple and
                    # other Instance subclasses take the branch below instead.
                    cls = inferred.__class__
                    if cls is bases.Instance or cls is objects.ExceptionInstance:
                        inferred = inferred._proxied
                        iattrs = inferred.instance_attrs
                        if not _can_assign_attr(inferred, node.attrname):
                            continue
                    elif isinstance(inferred, bases.Instance):
                        # Const, Tuple or other containers that inherit from
                        # `Instance`
                        continue
                    elif inferred.is_function:
                        iattrs = inferred.instance_attrs
                    else:
                        iattrs = inferred.locals
                except AttributeError:
                    # XXX log error
                    continue
                values = iattrs.setdefault(node.attrname, [])
                if node in values:
                    continue
                # get assign in __init__ first XXX useful ?
                if (
                    frame.name == "__init__"
                    and values
                    and values[0].frame(future=True).name != "__init__"
                ):
                    values.insert(0, node)
                else:
                    values.append(node)
        except InferenceError:
            pass
|
||||
|
||||
|
||||
def build_namespace_package_module(name: str, path: List[str]) -> nodes.Module:
    """Return a stub Module node representing a namespace package."""
    namespace_module = nodes.Module(name, doc="", path=path, package=True)
    return namespace_module
|
||||
|
||||
|
||||
def parse(code, module_name="", path=None, apply_transforms=True):
    """Parses a source string in order to obtain an astroid AST from it

    :param str code: The code for the module.
    :param str module_name: The name for the module, if any
    :param str path: The path for the module
    :param bool apply_transforms:
        Apply the transforms for the given code. Set this to False if you
        don't want the default transforms to be applied.
    """
    # Dedent first so indented snippets (e.g. embedded in docstrings or
    # tests) parse as top-level code.
    code = textwrap.dedent(code)
    builder = AstroidBuilder(
        manager=AstroidManager(), apply_transforms=apply_transforms
    )
    return builder.string_build(code, modname=module_name, path=path)
|
||||
|
||||
|
||||
def _extract_expressions(node):
    """Find expressions in a call to _TRANSIENT_FUNCTION and extract them.

    The function walks the AST recursively to search for expressions that
    are wrapped into a call to _TRANSIENT_FUNCTION. If it finds such an
    expression, it completely removes the function call node from the tree,
    replacing it by the wrapped expression inside the parent.

    :param node: An astroid node.
    :type node: astroid.bases.NodeNG
    :yields: The sequence of wrapped expressions on the modified tree.
    """
    if (
        isinstance(node, nodes.Call)
        and isinstance(node.func, nodes.Name)
        and node.func.name == _TRANSIENT_FUNCTION
    ):
        # The single positional argument is the expression being wrapped.
        real_expr = node.args[0]
        real_expr.parent = node.parent
        # Search for node in all _astng_fields (the fields checked when
        # get_children is called) of its parent. Some of those fields may
        # be lists or tuples, in which case the elements need to be checked.
        # When we find it, replace it by real_expr, so that the AST looks
        # like no call to _TRANSIENT_FUNCTION ever took place.
        for name in node.parent._astroid_fields:
            child = getattr(node.parent, name)
            if isinstance(child, (list, tuple)):
                for idx, compound_child in enumerate(child):
                    if compound_child is node:
                        child[idx] = real_expr
            elif child is node:
                setattr(node.parent, name, real_expr)
        yield real_expr
    else:
        # Recurse; a wrapped expression may appear anywhere in the subtree.
        for child in node.get_children():
            yield from _extract_expressions(child)
|
||||
|
||||
|
||||
def _find_statement_by_line(node, line):
    """Extracts the statement on a specific line from an AST.

    If the line number of node matches line, it will be returned;
    otherwise its children are iterated and the function is called
    recursively.

    :param node: An astroid node.
    :type node: astroid.bases.NodeNG
    :param line: The line number of the statement to extract.
    :type line: int
    :returns: The statement on the line, or None if no statement for the line
      can be found.
    :rtype: astroid.bases.NodeNG or None
    """
    # This is an inaccuracy in the AST: the nodes that can be
    # decorated do not carry explicit information on which line
    # the actual definition (class/def) is, but .fromlineno seems to
    # be close enough.
    is_definition = isinstance(
        node, (nodes.ClassDef, nodes.FunctionDef, nodes.MatchCase)
    )
    node_line = node.fromlineno if is_definition else node.lineno

    if node_line == line:
        return node

    for child in node.get_children():
        found = _find_statement_by_line(child, line)
        if found:
            return found

    return None
|
||||
|
||||
|
||||
def extract_node(code: str, module_name: str = "") -> Union[NodeNG, List[NodeNG]]:
    """Parses some Python code as a module and extracts a designated AST node.

    Statements:
     To extract one or more statement nodes, append #@ to the end of the line

     Examples:
       >>> def x():
       >>>   def y():
       >>>     return 1 #@

       The return statement will be extracted.

       >>> class X(object):
       >>>   def meth(self): #@
       >>>     pass

       The function object 'meth' will be extracted.

    Expressions:
     To extract arbitrary expressions, surround them with the fake
     function call __(...). After parsing, the surrounded expression
     will be returned and the whole AST (accessible via the returned
     node's parent attribute) will look like the function call was
     never there in the first place.

     Examples:
       >>> a = __(1)

       The const node will be extracted.

       >>> def x(d=__(foo.bar)): pass

       The node containing the default argument will be extracted.

       >>> def foo(a, b):
       >>>   return 0 < __(len(a)) < b

       The node containing the function call 'len' will be extracted.

    If no statements or expressions are selected, the last toplevel
    statement will be returned.

    If the selected statement is a discard statement, (i.e. an expression
    turned into a statement), the wrapped expression is returned instead.

    For convenience, singleton lists are unpacked.

    :param str code: A piece of Python code that is parsed as
    a module. Will be passed through textwrap.dedent first.
    :param str module_name: The name of the module.
    :returns: The designated node from the parse tree, or a list of nodes.
    """

    def _extract(node):
        # Unwrap discard statements so the caller gets the expression itself.
        if isinstance(node, nodes.Expr):
            return node.value

        return node

    # Collect 1-based line numbers of lines marked with the #@ selector.
    requested_lines = []
    for idx, line in enumerate(code.splitlines()):
        if line.strip().endswith(_STATEMENT_SELECTOR):
            requested_lines.append(idx + 1)

    tree = parse(code, module_name=module_name)
    if not tree.body:
        raise ValueError("Empty tree, cannot extract from it")

    extracted = []
    if requested_lines:
        extracted = [_find_statement_by_line(tree, line) for line in requested_lines]

    # Modifies the tree.
    extracted.extend(_extract_expressions(tree))

    # Default: fall back to the last top-level statement.
    if not extracted:
        extracted.append(tree.body[-1])

    extracted = [_extract(node) for node in extracted]
    # Singleton lists are unpacked for convenience.
    if len(extracted) == 1:
        return extracted[0]
    return extracted
|
||||
|
||||
|
||||
def _parse_string(data, type_comments=True):
    """Parse source text into a raw AST, retrying without type-comment
    support when misplaced type annotations break parsing.

    :param str data: The source text to parse.
    :param bool type_comments: Whether to ask the parser for type comments.
    :returns: Tuple of the parsed tree and the parser module that was used.
    """
    parser_module = get_parser_module(type_comments=type_comments)
    try:
        parsed = parser_module.parse(data + "\n", type_comments=type_comments)
    except SyntaxError as exc:
        # If the type annotations are misplaced for some reason, we do not want
        # to fail the entire parsing of the file, so we need to retry the parsing without
        # type comment support.
        if exc.args[0] != MISPLACED_TYPE_ANNOTATION_ERROR or not type_comments:
            raise

        parser_module = get_parser_module(type_comments=False)
        parsed = parser_module.parse(data + "\n", type_comments=False)
    return parsed, parser_module
|
|
@ -1,21 +0,0 @@
|
|||
import enum
import sys

# Interpreter version guards used throughout astroid.
PY38 = sys.version_info[:2] == (3, 8)
PY37_PLUS = sys.version_info >= (3, 7)
PY38_PLUS = sys.version_info >= (3, 8)
PY39_PLUS = sys.version_info >= (3, 9)
PY310_PLUS = sys.version_info >= (3, 10)
BUILTINS = "builtins"  # TODO Remove in 2.8


class Context(enum.Enum):
    # Expression contexts, mirroring ast.Load / ast.Store / ast.Del.
    Load = 1
    Store = 2
    Del = 3


# TODO Remove in 3.0 in favor of Context
Load = Context.Load
Store = Context.Store
Del = Context.Del
|
|
@ -1,213 +0,0 @@
|
|||
# Copyright (c) 2015-2016, 2018-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2019-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2020 Bryce Guinta <bryce.guinta@protonmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Kian Meng, Ang <kianmeng.ang@gmail.com>
|
||||
# Copyright (c) 2021 Daniël van Noord <13665637+DanielNoord@users.noreply.github.com>
|
||||
# Copyright (c) 2021 David Liu <david@cs.toronto.edu>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Andrew Haigh <hello@nelf.in>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""Various context related utilities, including inference and call contexts."""
|
||||
import contextlib
|
||||
import pprint
|
||||
from typing import TYPE_CHECKING, List, MutableMapping, Optional, Sequence, Tuple
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from astroid.nodes.node_classes import Keyword, NodeNG
|
||||
|
||||
|
||||
# Module-level cache of inference results, shared by every
# InferenceContext instance (exposed via InferenceContext.inferred).
_INFERENCE_CACHE = {}


def _invalidate_cache():
    """Clear the shared inference result cache."""
    _INFERENCE_CACHE.clear()
|
||||
|
||||
|
||||
class InferenceContext:
    """Provide context for inference

    Store already inferred nodes to save time
    Account for already visited nodes to stop infinite recursion
    """

    __slots__ = (
        "path",
        "lookupname",
        "callcontext",
        "boundnode",
        "extra_context",
        "_nodes_inferred",
    )

    # Hard cap on the number of nodes inferred through one context tree.
    max_inferred = 100

    def __init__(self, path=None, nodes_inferred=None):
        # The counter lives in a one-element list so clones can share and
        # mutate it despite __slots__ (see the nodes_inferred property).
        if nodes_inferred is None:
            self._nodes_inferred = [0]
        else:
            self._nodes_inferred = nodes_inferred
        self.path = path or set()
        """
        :type: set(tuple(NodeNG, optional(str)))

        Path of visited nodes and their lookupname

        Currently this key is ``(node, context.lookupname)``
        """
        self.lookupname = None
        """
        :type: optional[str]

        The original name of the node

        e.g.
        foo = 1
        The inference of 'foo' is nodes.Const(1) but the lookup name is 'foo'
        """
        self.callcontext = None
        """
        :type: optional[CallContext]

        The call arguments and keywords for the given context
        """
        self.boundnode = None
        """
        :type: optional[NodeNG]

        The bound node of the given context

        e.g. the bound node of object.__new__(cls) is the object node
        """
        self.extra_context = {}
        """
        :type: dict(NodeNG, Context)

        Context that needs to be passed down through call stacks
        for call arguments
        """

    @property
    def nodes_inferred(self):
        """
        Number of nodes inferred in this context and all its clones/descendants

        Wrap inner value in a mutable cell to allow for mutating a class
        variable in the presence of __slots__
        """
        return self._nodes_inferred[0]

    @nodes_inferred.setter
    def nodes_inferred(self, value):
        self._nodes_inferred[0] = value

    @property
    def inferred(
        self,
    ) -> MutableMapping[
        Tuple["NodeNG", Optional[str], Optional[str], Optional[str]], Sequence["NodeNG"]
    ]:
        """
        Inferred node contexts to their mapped results

        Currently the key is ``(node, lookupname, callcontext, boundnode)``
        and the value is tuple of the inferred results

        Backed by the module-level _INFERENCE_CACHE, so results are shared
        across all contexts.
        """
        return _INFERENCE_CACHE

    def push(self, node):
        """Push node into inference path

        :return: True if node is already in context path else False
        :rtype: bool

        Allows one to see if the given node has already
        been looked at for this inference context"""
        name = self.lookupname
        if (node, name) in self.path:
            return True

        self.path.add((node, name))
        return False

    def clone(self):
        """Clone inference path

        For example, each side of a binary operation (BinOp)
        starts with the same context but diverge as each side is inferred
        so the InferenceContext will need be cloned"""
        # XXX copy lookupname/callcontext ?
        clone = InferenceContext(self.path.copy(), nodes_inferred=self._nodes_inferred)
        clone.callcontext = self.callcontext
        clone.boundnode = self.boundnode
        clone.extra_context = self.extra_context
        return clone

    @contextlib.contextmanager
    def restore_path(self):
        # Snapshot the path so lookups performed inside the ``with`` block
        # do not permanently pollute this context.
        path = set(self.path)
        yield
        self.path = path

    def __str__(self):
        # Debug representation listing every slot and its pretty-printed value.
        state = (
            f"{field}={pprint.pformat(getattr(self, field), width=80 - len(field))}"
            for field in self.__slots__
        )
        return "{}({})".format(type(self).__name__, ",\n    ".join(state))
|
||||
|
||||
|
||||
class CallContext:
    """Holds information for a call site."""

    __slots__ = ("args", "keywords", "callee")

    def __init__(
        self,
        args: List["NodeNG"],
        keywords: Optional[List["Keyword"]] = None,
        callee: Optional["NodeNG"] = None,
    ):
        self.args = args  # Call positional arguments
        # Normalise Keyword nodes into plain (name, value) pairs.
        self.keywords = (
            [(keyword.arg, keyword.value) for keyword in keywords]
            if keywords
            else []
        )
        self.callee = callee  # Function being called
|
||||
|
||||
|
||||
def copy_context(context: Optional[InferenceContext]) -> InferenceContext:
    """Clone a context if given, or return a fresh context.

    :param context: The context to clone, or None.
    :returns: A clone of *context*, or a brand-new InferenceContext.
    """
    # Fix: docstring previously read "fresh contexxt".
    if context is not None:
        return context.clone()

    return InferenceContext()
|
||||
|
||||
|
||||
def bind_context_to_node(context, node):
    """Give a context a boundnode
    to retrieve the correct function name or attribute value
    from further inference.

    Do not use an existing context since the boundnode could then
    be incorrectly propagated higher up in the call stack.

    :param context: Context to use
    :type context: Optional(context)

    :param node: Node to do name lookups from
    :type node NodeNG:

    :returns: A new context
    :rtype: InferenceContext
    """
    # Always work on a copy so the caller's context keeps its own boundnode.
    context = copy_context(context)
    context.boundnode = node
    return context
|
|
@ -1,209 +0,0 @@
|
|||
# Copyright (c) 2015-2016, 2018, 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
|
||||
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
|
||||
# Copyright (c) 2018, 2021 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2018 Tomas Gavenciak <gavento@ucw.cz>
|
||||
# Copyright (c) 2018 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
# Copyright (c) 2018 HoverHell <hoverhell@gmail.com>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2020 Ram Rachum <ram@rachum.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Daniël van Noord <13665637+DanielNoord@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
""" A few useful function/method decorators."""
|
||||
|
||||
import functools
|
||||
import inspect
|
||||
import sys
|
||||
import warnings
|
||||
from typing import Callable, TypeVar
|
||||
|
||||
import wrapt
|
||||
|
||||
from astroid import util
|
||||
from astroid.context import InferenceContext
|
||||
from astroid.exceptions import InferenceError
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
from typing import ParamSpec
|
||||
else:
|
||||
from typing_extensions import ParamSpec
|
||||
|
||||
R = TypeVar("R")
|
||||
P = ParamSpec("P")
|
||||
|
||||
|
||||
@wrapt.decorator
def cached(func, instance, args, kwargs):
    """Simple decorator to cache result of method calls without args.

    The cache is stored on the instance itself (attribute ``__cache``),
    keyed by the wrapped function object.
    """
    cache = getattr(instance, "__cache", None)
    if cache is None:
        # First cached call on this instance: create its cache dict.
        instance.__cache = cache = {}
    try:
        return cache[func]
    except KeyError:
        cache[func] = result = func(*args, **kwargs)
        return result
|
||||
|
||||
|
||||
class cachedproperty:
    """Provides a cached property equivalent to the stacking of
    @cached and @property, but more efficient.

    After first usage, the <property_name> becomes part of the object's
    __dict__. Doing:

        del obj.<property_name> empties the cache.

    Idea taken from the pyramid_ framework and the mercurial_ project.

    .. _pyramid: http://pypi.python.org/pypi/pyramid
    .. _mercurial: http://pypi.python.org/pypi/Mercurial
    """

    __slots__ = ("wrapped",)

    def __init__(self, wrapped):
        # __get__ caches under wrapped.__name__, so the wrapped callable
        # must expose one; reject anything that does not.
        try:
            wrapped.__name__
        except AttributeError as exc:
            raise TypeError(f"{wrapped} must have a __name__ attribute") from exc
        self.wrapped = wrapped

    @property
    def __doc__(self):
        doc = getattr(self.wrapped, "__doc__", None)
        suffix = "\n%s" % doc if doc else ""
        return "<wrapped by the cachedproperty decorator>%s" % suffix

    def __get__(self, inst, objtype=None):
        # Class-level access returns the descriptor itself.
        if inst is None:
            return self
        # Compute once, then shadow the descriptor via the instance dict.
        value = self.wrapped(inst)
        setattr(inst, self.wrapped.__name__, value)
        return value
|
||||
|
||||
|
||||
def path_wrapper(func):
    """return the given infer function wrapped to handle the path

    Used to stop inference if the node has already been looked
    at for a given `InferenceContext` to prevent infinite recursion
    """

    @functools.wraps(func)
    def wrapped(node, context=None, _func=func, **kwargs):
        """wrapper function handling context"""
        if context is None:
            context = InferenceContext()
        if context.push(node):
            # Node already visited on this inference path: bail out
            # instead of recursing forever.
            return

        yielded = set()

        for res in _func(node, context, **kwargs):
            # unproxy only true instance, not const, tuple, dict...
            if res.__class__.__name__ == "Instance":
                ares = res._proxied
            else:
                ares = res
            # De-duplicate results by their unproxied form.
            if ares not in yielded:
                yield res
                yielded.add(ares)

    return wrapped
|
||||
|
||||
|
||||
@wrapt.decorator
def yes_if_nothing_inferred(func, instance, args, kwargs):
    """Wrap an inference generator so that an empty result yields
    util.Uninferable instead of yielding nothing at all."""
    generator = func(*args, **kwargs)

    try:
        yield next(generator)
    except StopIteration:
        # generator is empty
        yield util.Uninferable
        return

    # At least one result: pass the rest through untouched.
    yield from generator
|
||||
|
||||
|
||||
@wrapt.decorator
def raise_if_nothing_inferred(func, instance, args, kwargs):
    """Wrap an inference generator so that an empty result raises
    InferenceError instead of silently yielding nothing."""
    generator = func(*args, **kwargs)
    try:
        yield next(generator)
    except StopIteration as error:
        # generator is empty
        if error.args:
            # The generator may smuggle InferenceError kwargs in
            # StopIteration.args[0].
            # pylint: disable=not-a-mapping
            raise InferenceError(**error.args[0]) from error
        raise InferenceError(
            "StopIteration raised without any error information."
        ) from error

    # At least one result: pass the rest through untouched.
    yield from generator
|
||||
|
||||
|
||||
def deprecate_default_argument_values(
    astroid_version: str = "3.0", **arguments: str
) -> Callable[[Callable[P, R]], Callable[P, R]]:
    """Decorator which emits a DeprecationWarning if any arguments specified
    are None or not passed at all.

    Arguments should be a key-value mapping, with the key being the argument to check
    and the value being a type annotation as string for the value of the argument.
    """
    # Helpful links
    # Decorator for DeprecationWarning: https://stackoverflow.com/a/49802489
    # Typing of stacked decorators: https://stackoverflow.com/a/68290080

    def deco(func: Callable[P, R]) -> Callable[P, R]:
        """Decorator function."""

        @functools.wraps(func)
        def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
            """Emit DeprecationWarnings if conditions are met."""

            keys = list(inspect.signature(func).parameters.keys())
            for arg, type_annotation in arguments.items():
                try:
                    index = keys.index(arg)
                except ValueError:
                    raise Exception(
                        f"Can't find argument '{arg}' for '{args[0].__class__.__qualname__}'"
                    ) from None
                # NOTE(review): keys.index raises ValueError rather than
                # returning -1, so the ``index == -1`` guard below appears
                # unreachable — confirm before removing.
                if (
                    # Check kwargs
                    # - if found, check it's not None
                    (arg in kwargs and kwargs[arg] is None)
                    # Check args
                    # - make sure not in kwargs
                    # - len(args) needs to be long enough, if too short
                    # arg can't be in args either
                    # - args[index] should not be None
                    or arg not in kwargs
                    and (
                        index == -1
                        or len(args) <= index
                        or (len(args) > index and args[index] is None)
                    )
                ):
                    warnings.warn(
                        f"'{arg}' will be a required argument for "
                        f"'{args[0].__class__.__qualname__}.{func.__name__}' in astroid {astroid_version} "
                        f"('{arg}' should be of type: '{type_annotation}')",
                        DeprecationWarning,
                    )
            return func(*args, **kwargs)

        return wrapper

    return deco
|
|
@ -1,301 +0,0 @@
|
|||
# Copyright (c) 2007, 2009-2010, 2013 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2014 Google, Inc.
|
||||
# Copyright (c) 2015-2018, 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Daniël van Noord <13665637+DanielNoord@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Andrew Haigh <hello@nelf.in>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""this module contains exceptions used in the astroid library
|
||||
"""
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
from astroid import util
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from astroid import nodes
|
||||
|
||||
# Public API of this module, including backwards-compatibility aliases
# (e.g. AstroidBuildingException, NotFoundError) defined at the bottom.
__all__ = (
    "AstroidBuildingError",
    "AstroidBuildingException",
    "AstroidError",
    "AstroidImportError",
    "AstroidIndexError",
    "AstroidSyntaxError",
    "AstroidTypeError",
    "AstroidValueError",
    "AttributeInferenceError",
    "BinaryOperationError",
    "DuplicateBasesError",
    "InconsistentMroError",
    "InferenceError",
    "InferenceOverwriteError",
    "MroError",
    "NameInferenceError",
    "NoDefault",
    "NotFoundError",
    "OperationError",
    "ResolveError",
    "SuperArgumentTypeError",
    "SuperError",
    "TooManyLevelsError",
    "UnaryOperationError",
    "UnresolvableName",
    "UseInferenceDefault",
)
|
||||
|
||||
|
||||
class AstroidError(Exception):
    """base exception class for all astroid related exceptions

    AstroidError and its subclasses are structured, intended to hold
    objects representing state when the exception is thrown.  Field
    values are passed to the constructor as keyword-only arguments.
    Each subclass has its own set of standard fields, but use your
    best judgment to decide whether a specific exception instance
    needs more or fewer fields for debugging.  Field values may be
    used to lazily generate the error message: self.message.format()
    will be called with the field names and values supplied as keyword
    arguments.
    """

    def __init__(self, message="", **kws):
        super().__init__(message)
        self.message = message
        # Attach arbitrary structured state as attributes for later
        # message formatting and debugging.
        for attr_name in kws:
            setattr(self, attr_name, kws[attr_name])

    def __str__(self):
        # Lazily interpolate the stored fields into the message template.
        return self.message.format(**vars(self))
|
||||
|
||||
|
||||
class AstroidBuildingError(AstroidError):
    """exception class when we are unable to build an astroid representation

    Standard attributes:
        modname: Name of the module that AST construction failed for.
        error: Exception raised during construction.
    """

    # Default message template expects a ``modname`` keyword.
    def __init__(self, message="Failed to import module {modname}.", **kws):
        super().__init__(message, **kws)
|
||||
|
||||
|
||||
class AstroidImportError(AstroidBuildingError):
    """Exception class used when a module can't be imported by astroid."""
|
||||
|
||||
|
||||
class TooManyLevelsError(AstroidImportError):
    """Exception class which is raised when a relative import was beyond the top-level.

    Standard attributes:
        level: The level which was attempted.
        name: the name of the module on which the relative import was attempted.
    """

    level = None  # The level which was attempted.
    name = None  # Name of the module the relative import was attempted on.

    def __init__(
        self,
        message="Relative import with too many levels " "({level}) for module {name!r}",
        **kws,
    ):
        super().__init__(message, **kws)
|
||||
|
||||
|
||||
class AstroidSyntaxError(AstroidBuildingError):
    """Exception class used when a module can't be parsed."""
|
||||
|
||||
|
||||
class NoDefault(AstroidError):
    """raised by function's `default_value` method when an argument has
    no default value

    Standard attributes:
        func: Function node.
        name: Name of argument without a default.
    """

    func = None  # Function node.
    name = None  # Name of the argument lacking a default.

    def __init__(self, message="{func!r} has no default for {name!r}.", **kws):
        super().__init__(message, **kws)
|
||||
|
||||
|
||||
class ResolveError(AstroidError):
    """Base class of astroid resolution/inference error.

    ResolveError is not intended to be raised.

    Standard attributes:
        context: InferenceContext object.
    """

    context = None  # InferenceContext active when the error occurred.
|
||||
|
||||
|
||||
class MroError(ResolveError):
    """Error raised when there is a problem with method resolution of a class.

    Standard attributes:
        mros: A sequence of sequences containing ClassDef nodes.
        cls: ClassDef node whose MRO resolution failed.
        context: InferenceContext object.
    """

    mros = ()  # Candidate MROs: sequences of ClassDef nodes.
    cls = None  # ClassDef node whose MRO resolution failed.

    def __str__(self):
        # Render each candidate MRO as "(Base1, Base2, ...)".
        mro_names = ", ".join(f"({', '.join(b.name for b in m)})" for m in self.mros)
        return self.message.format(mros=mro_names, cls=self.cls)
|
||||
|
||||
|
||||
class DuplicateBasesError(MroError):
    """Error raised when there are duplicate bases in the same class bases."""
|
||||
|
||||
|
||||
class InconsistentMroError(MroError):
    """Error raised when a class's MRO is inconsistent."""
|
||||
|
||||
|
||||
class SuperError(ResolveError):
    """Error raised when there is a problem with a *super* call.

    Standard attributes:
        *super_*: The Super instance that raised the exception.
        context: InferenceContext object.
    """

    super_ = None  # The Super instance that raised the exception.

    def __str__(self):
        # Format the message from the offending Super instance's state.
        return self.message.format(**vars(self.super_))
|
||||
|
||||
|
||||
class InferenceError(ResolveError):
    """raised when we are unable to infer a node

    Standard attributes:
        node: The node inference was called on.
        context: InferenceContext object.
    """

    node = None  # The node inference was called on.
    context = None  # InferenceContext active when inference failed.

    def __init__(self, message="Inference failed for {node!r}.", **kws):
        super().__init__(message, **kws)
|
||||
|
||||
|
||||
# Why does this inherit from InferenceError rather than ResolveError?
# Changing it causes some inference tests to fail.
class NameInferenceError(InferenceError):
    """Raised when a name lookup fails, corresponds to NameError.

    Standard attributes:
        name: The name for which lookup failed, as a string.
        scope: The node representing the scope in which the lookup occurred.
        context: InferenceContext object.
    """

    name = None  # The name for which the lookup failed.
    scope = None  # Scope node in which the lookup occurred.

    def __init__(self, message="{name!r} not found in {scope!r}.", **kws):
        super().__init__(message, **kws)
|
||||
|
||||
|
||||
class AttributeInferenceError(ResolveError):
    """Raised when an attribute lookup fails, corresponds to AttributeError.

    Standard attributes:
        target: The node for which lookup failed.
        attribute: The attribute for which lookup failed, as a string.
        context: InferenceContext object.
    """

    target = None  # The node for which the lookup failed.
    attribute = None  # The attribute name that could not be found.

    def __init__(self, message="{attribute!r} not found on {target!r}.", **kws):
        super().__init__(message, **kws)
|
||||
|
||||
|
||||
# NOTE: inherits Exception, not AstroidError — this is a control-flow
# signal used by inference tips, not a failure.
class UseInferenceDefault(Exception):
    """exception to be raised in custom inference function to indicate that it
    should go back to the default behaviour
    """
|
||||
|
||||
|
||||
class _NonDeducibleTypeHierarchy(Exception):
    """Raised when is_subtype / is_supertype can't deduce the relation between two types."""
|
||||
|
||||
|
||||
class AstroidIndexError(AstroidError):
    """Raised when an Indexable / Mapping does not have an index / key."""
|
||||
|
||||
|
||||
class AstroidTypeError(AstroidError):
    """Raised when a TypeError would be expected in Python code."""
|
||||
|
||||
|
||||
class AstroidValueError(AstroidError):
    """Raised when a ValueError would be expected in Python code."""
|
||||
|
||||
|
||||
class InferenceOverwriteError(AstroidError):
    """Raised when an inference tip is overwritten

    Currently only used for debugging.
    """
|
||||
|
||||
|
||||
class ParentMissingError(AstroidError):
    """Raised when a node which is expected to have a parent attribute is missing one

    Standard attributes:
        target: The node for which the parent lookup failed.
    """

    def __init__(self, target: "nodes.NodeNG") -> None:
        self.target = target
        super().__init__(message=f"Parent not found on {target!r}.")
|
||||
|
||||
|
||||
class StatementMissing(ParentMissingError):
    """Raised when a call to node.statement() does not return a node. This is because
    a node in the chain does not have a parent attribute and therefore does not
    return a node for statement().

    Standard attributes:
        target: The node for which the parent lookup failed.
    """

    def __init__(self, target: "nodes.NodeNG") -> None:
        # Skip ParentMissingError.__init__ (which hard-codes its own
        # message) and call AstroidError directly.
        # pylint: disable-next=bad-super-call
        # https://github.com/PyCQA/pylint/issues/2903
        # https://github.com/PyCQA/astroid/pull/1217#discussion_r744149027
        super(ParentMissingError, self).__init__(
            message=f"Statement not found on {target!r}"
        )
|
||||
|
||||
|
||||
# Backwards-compatibility aliases
OperationError = util.BadOperationMessage
UnaryOperationError = util.BadUnaryOperationMessage
BinaryOperationError = util.BadBinaryOperationMessage

# Old exception names kept so existing except-clauses keep working.
SuperArgumentTypeError = SuperError
UnresolvableName = NameInferenceError
NotFoundError = AttributeInferenceError
AstroidBuildingException = AstroidBuildingError
|
|
@ -1,238 +0,0 @@
|
|||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""_filter_stmts and helper functions. This method gets used in LocalsDictnodes.NodeNG._scope_lookup.
|
||||
It is not considered public.
|
||||
"""
|
||||
|
||||
from typing import List, Optional, Tuple
|
||||
|
||||
from astroid import nodes
|
||||
|
||||
|
||||
def _get_filtered_node_statements(
    base_node: nodes.NodeNG, stmt_nodes: List[nodes.NodeNG]
) -> List[Tuple[nodes.NodeNG, nodes.Statement]]:
    """Pair each node with its enclosing statement.

    When every paired statement is an ExceptHandler, only the handler(s)
    that actually enclose *base_node* are kept, since the exception name
    is local to its handler block.
    """
    pairs = [(stmt_node, stmt_node.statement(future=True)) for stmt_node in stmt_nodes]
    # Next we check if we have ExceptHandlers that are parent
    # of the underlying variable, in which case the last one survives
    only_except_handlers = len(pairs) > 1 and all(
        isinstance(statement, nodes.ExceptHandler) for _, statement in pairs
    )
    if only_except_handlers:
        pairs = [
            (stmt_node, statement)
            for stmt_node, statement in pairs
            if statement.parent_of(base_node)
        ]
    return pairs
|
||||
|
||||
|
||||
def _is_from_decorator(node):
    """Return True if the given node is the child of a decorator"""
    for ancestor in node.node_ancestors():
        if isinstance(ancestor, nodes.Decorators):
            return True
    return False
|
||||
|
||||
|
||||
def _get_if_statement_ancestor(node: nodes.NodeNG) -> Optional[nodes.If]:
    """Return the first parent node that is an If node (or None)"""
    return next(
        (
            ancestor
            for ancestor in node.node_ancestors()
            if isinstance(ancestor, nodes.If)
        ),
        None,
    )
|
||||
|
||||
|
||||
def _filter_stmts(base_node: nodes.NodeNG, stmts, frame, offset):
    """Filter the given list of statements to remove ignorable statements.

    If base_node is not a frame itself and the name is found in the inner
    frame locals, statements will be filtered to remove ignorable
    statements according to base_node's location.

    :param stmts: The statements to filter.
    :type stmts: list(nodes.NodeNG)

    :param frame: The frame that all of the given statements belong to.
    :type frame: nodes.NodeNG

    :param offset: The line offset to filter statements up to.
    :type offset: int

    :returns: The filtered statements.
    :rtype: list(nodes.NodeNG)
    """
    # if offset == -1, my actual frame is not the inner frame but its parent
    #
    # class A(B): pass
    #
    # we need this to resolve B correctly
    if offset == -1:
        myframe = base_node.frame().parent.frame()
    else:
        myframe = base_node.frame()
        # If the frame of this node is the same as the statement
        # of this node, then the node is part of a class or
        # a function definition and the frame of this node should be the
        # the upper frame, not the frame of the definition.
        # For more information why this is important,
        # see Pylint issue #295.
        # For example, for 'b', the statement is the same
        # as the frame / scope:
        #
        # def test(b=1):
        #     ...
        if (
            base_node.parent
            and base_node.statement(future=True) is myframe
            and myframe.parent
        ):
            myframe = myframe.parent.frame()

    # base_node may be a Module, which has no parent and hence no statement.
    mystmt: Optional[nodes.Statement] = None
    if base_node.parent:
        mystmt = base_node.statement(future=True)

    # line filtering if we are in the same frame
    #
    # take care node may be missing lineno information (this is the case for
    # nodes inserted for living objects)
    if myframe is frame and mystmt and mystmt.fromlineno is not None:
        assert mystmt.fromlineno is not None, mystmt
        mylineno = mystmt.fromlineno + offset
    else:
        # disabling lineno filtering
        mylineno = 0

    _stmts = []
    _stmt_parents = []
    statements = _get_filtered_node_statements(base_node, stmts)
    for node, stmt in statements:
        # line filtering is on and we have reached our location, break
        if stmt.fromlineno and stmt.fromlineno > mylineno > 0:
            break
        # Ignore decorators with the same name as the
        # decorated function
        # Fixes issue #375
        if mystmt is stmt and _is_from_decorator(base_node):
            continue
        assert hasattr(node, "assign_type"), (
            node,
            node.scope(),
            node.scope().locals,
        )
        assign_type = node.assign_type()
        # A class that lists base_node among its bases cannot shadow it.
        if node.has_base(base_node):
            break

        _stmts, done = assign_type._get_filtered_stmts(base_node, node, _stmts, mystmt)
        if done:
            break

        optional_assign = assign_type.optional_assign
        if optional_assign and assign_type.parent_of(base_node):
            # we are inside a loop, loop var assignment is hiding previous
            # assignment
            _stmts = [node]
            _stmt_parents = [stmt.parent]
            continue

        if isinstance(assign_type, nodes.NamedExpr):
            # If the NamedExpr is in an if statement we do some basic control flow inference
            if_parent = _get_if_statement_ancestor(assign_type)
            if if_parent:
                # If the if statement is within another if statement we append the node
                # to possible statements
                if _get_if_statement_ancestor(if_parent):
                    optional_assign = False
                    _stmts.append(node)
                    _stmt_parents.append(stmt.parent)
                # If the if statement is first-level and not within an orelse block
                # we know that it will be evaluated
                elif not if_parent.is_orelse:
                    _stmts = [node]
                    _stmt_parents = [stmt.parent]
                # Else we do not known enough about the control flow to be 100% certain
                # and we append to possible statements
                else:
                    _stmts.append(node)
                    _stmt_parents.append(stmt.parent)
            else:
                _stmts = [node]
                _stmt_parents = [stmt.parent]

        # XXX comment various branches below!!!
        try:
            pindex = _stmt_parents.index(stmt.parent)
        except ValueError:
            pass
        else:
            # we got a parent index, this means the currently visited node
            # is at the same block level as a previously visited node
            if _stmts[pindex].assign_type().parent_of(assign_type):
                # both statements are not at the same block level
                continue
            # if currently visited node is following previously considered
            # assignment and both are not exclusive, we can drop the
            # previous one. For instance in the following code ::
            #
            #   if a:
            #     x = 1
            #   else:
            #     x = 2
            #   print x
            #
            # we can't remove neither x = 1 nor x = 2 when looking for 'x'
            # of 'print x'; while in the following ::
            #
            #   x = 1
            #   x = 2
            #   print x
            #
            # we can remove x = 1 when we see x = 2
            #
            # moreover, on loop assignment types, assignment won't
            # necessarily be done if the loop has no iteration, so we don't
            # want to clear previous assignments if any (hence the test on
            # optional_assign)
            if not (optional_assign or nodes.are_exclusive(_stmts[pindex], node)):
                del _stmt_parents[pindex]
                del _stmts[pindex]

        # If base_node and node are exclusive, then we can ignore node
        if nodes.are_exclusive(base_node, node):
            continue

        # An AssignName node overrides previous assignments if:
        #   1. node's statement always assigns
        #   2. node and base_node are in the same block (i.e., has the same parent as base_node)
        if isinstance(node, (nodes.NamedExpr, nodes.AssignName)):
            if isinstance(stmt, nodes.ExceptHandler):
                # If node's statement is an ExceptHandler, then it is the variable
                # bound to the caught exception. If base_node is not contained within
                # the exception handler block, node should override previous assignments;
                # otherwise, node should be ignored, as an exception variable
                # is local to the handler block.
                if stmt.parent_of(base_node):
                    _stmts = []
                    _stmt_parents = []
                else:
                    continue
            elif not optional_assign and mystmt and stmt.parent is mystmt.parent:
                _stmts = []
                _stmt_parents = []
        elif isinstance(node, nodes.DelName):
            # Remove all previously stored assignments
            _stmts = []
            _stmt_parents = []
            continue
        # Add the new assignment
        _stmts.append(node)
        if isinstance(node, nodes.Arguments) or isinstance(
            node.parent, nodes.Arguments
        ):
            # Special case for _stmt_parents when node is a function parameter;
            # in this case, stmt is the enclosing FunctionDef, which is what we
            # want to add to _stmt_parents, not stmt.parent. This case occurs when
            # node is an Arguments node (representing varargs or kwargs parameter),
            # and when node.parent is an Arguments node (other parameters).
            # See issue #180.
            _stmt_parents.append(stmt)
        else:
            _stmt_parents.append(stmt.parent)
    return _stmts
|
|
@ -1,316 +0,0 @@
|
|||
# Copyright (c) 2015-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2020 Simon Hewitt <si@sjhewitt.co.uk>
|
||||
# Copyright (c) 2020 Bryce Guinta <bryce.guinta@protonmail.com>
|
||||
# Copyright (c) 2020 Ram Rachum <ram@rachum.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Tushar Sadhwani <86737547+tushar-deepsource@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Daniël van Noord <13665637+DanielNoord@users.noreply.github.com>
|
||||
# Copyright (c) 2021 David Liu <david@cs.toronto.edu>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Andrew Haigh <hello@nelf.in>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
|
||||
"""
|
||||
Various helper utilities.
|
||||
"""
|
||||
|
||||
|
||||
from astroid import bases, manager, nodes, raw_building, util
|
||||
from astroid.context import CallContext, InferenceContext
|
||||
from astroid.exceptions import (
|
||||
AstroidTypeError,
|
||||
AttributeInferenceError,
|
||||
InferenceError,
|
||||
MroError,
|
||||
_NonDeducibleTypeHierarchy,
|
||||
)
|
||||
from astroid.nodes import scoped_nodes
|
||||
|
||||
|
||||
def _build_proxy_class(cls_name, builtins):
    """Build a synthetic class node named *cls_name*, parented in builtins."""
    node = raw_building.build_class(cls_name)
    node.parent = builtins
    return node
|
||||
|
||||
|
||||
def _function_type(function, builtins):
    """Return a proxy class describing the builtin type of *function*."""
    if isinstance(function, scoped_nodes.Lambda):
        # Functions defined in the builtins module are C-level builtins.
        if function.root().name == "builtins":
            cls_name = "builtin_function_or_method"
        else:
            cls_name = "function"
    elif isinstance(function, bases.BoundMethod):
        cls_name = "method"
    elif isinstance(function, bases.UnboundMethod):
        cls_name = "function"
    # NOTE(review): any other type leaves cls_name unbound (UnboundLocalError);
    # presumably callers only pass the three kinds above — confirm.
    return _build_proxy_class(cls_name, builtins)
|
||||
|
||||
|
||||
def _object_type(node, context=None):
    """Yield the type of each value *node* can be inferred to.

    Mirrors what the ``type()`` builtin would report for each inferred
    value: a metaclass (or ``type``) for classes, a function/method proxy
    for callables, a module proxy for modules, and the proxied class for
    everything else.
    """
    astroid_manager = manager.AstroidManager()
    builtins = astroid_manager.builtins_module
    context = context or InferenceContext()

    for inferred in node.infer(context=context):
        if isinstance(inferred, scoped_nodes.ClassDef):
            if inferred.newstyle:
                metaclass = inferred.metaclass(context=context)
                if metaclass:
                    yield metaclass
                    continue
            # No explicit metaclass: the type of a class is ``type``.
            yield builtins.getattr("type")[0]
        elif isinstance(inferred, (scoped_nodes.Lambda, bases.UnboundMethod)):
            yield _function_type(inferred, builtins)
        elif isinstance(inferred, scoped_nodes.Module):
            yield _build_proxy_class("module", builtins)
        else:
            # Instances and the rest: their type is the class they proxy.
            yield inferred._proxied
|
||||
|
||||
|
||||
def object_type(node, context=None):
    """Obtain the type of the given node

    This is used to implement the ``type`` builtin, which means that it's
    used for inferring type calls, as well as used in a couple of other places
    in the inference.
    The node will be inferred first, so this function can support all
    sorts of objects, as long as they support inference.
    """
    try:
        inferred_types = set(_object_type(node, context))
    except InferenceError:
        return util.Uninferable
    # An ambiguous (several types) or empty result is not usable.
    if len(inferred_types) != 1:
        return util.Uninferable
    return next(iter(inferred_types))
|
||||
|
||||
|
||||
def _object_type_is_subclass(obj_type, class_or_seq, context=None):
    """Check whether *obj_type* has any of the candidate classes in its MRO."""
    if isinstance(class_or_seq, (tuple, list)):
        candidates = list(class_or_seq)
    else:
        candidates = [class_or_seq]

    if obj_type is util.Uninferable:
        return util.Uninferable

    # Instances are not types
    candidates = [
        util.Uninferable if isinstance(candidate, bases.Instance) else candidate
        for candidate in candidates
    ]
    # strict compatibility with issubclass
    # issubclass(type, (object, 1)) evaluates to true
    # issubclass(object, (1, type)) raises TypeError
    for candidate in candidates:
        if candidate is util.Uninferable:
            raise AstroidTypeError("arg 2 must be a type or tuple of types")

        if any(ancestor == candidate for ancestor in obj_type.mro()):
            return True
    return False
|
||||
|
||||
|
||||
def object_isinstance(node, class_or_seq, context=None):
    """Check if a node 'isinstance' any node in class_or_seq

    :param node: A given node
    :param class_or_seq: Union[nodes.NodeNG, Sequence[nodes.NodeNG]]
    :rtype: bool

    :raises AstroidTypeError: if the given ``classes_or_seq`` are not types
    """
    inferred_type = object_type(node, context)
    if inferred_type is util.Uninferable:
        return util.Uninferable
    return _object_type_is_subclass(inferred_type, class_or_seq, context=context)
|
||||
|
||||
|
||||
def object_issubclass(node, class_or_seq, context=None):
    """Check if a type is a subclass of any node in class_or_seq

    :param node: A given node
    :param class_or_seq: Union[Nodes.NodeNG, Sequence[nodes.NodeNG]]
    :rtype: bool

    :raises AstroidTypeError: if the given ``classes_or_seq`` are not types
    :raises AstroidError: if the type of the given node cannot be inferred
        or its type's mro doesn't work
    """
    if isinstance(node, nodes.ClassDef):
        return _object_type_is_subclass(node, class_or_seq, context=context)
    raise TypeError(f"{node} needs to be a ClassDef node")
|
||||
|
||||
|
||||
def safe_infer(node, context=None):
    """Return the inferred value for the given node.

    Return None if inference failed or if there is some ambiguity (more than
    one node has been inferred).
    """
    try:
        results = node.infer(context=context)
        value = next(results)
    except (InferenceError, StopIteration):
        return None
    # A second result (or an error while producing it) means ambiguity.
    try:
        next(results)
    except StopIteration:
        return value
    except InferenceError:
        return None
    return None
|
||||
|
||||
|
||||
def has_known_bases(klass, context=None):
    """Return true if all base classes of a class could be inferred."""
    # Reuse the memoized answer when a previous call computed it.
    try:
        return klass._all_bases_known
    except AttributeError:
        pass
    all_known = True
    for base in klass.bases:
        inferred_base = safe_infer(base, context=context)
        # TODO: check for A->B->A->B pattern in class structure too?
        if (
            not isinstance(inferred_base, scoped_nodes.ClassDef)
            or inferred_base is klass
            or not has_known_bases(inferred_base, context=context)
        ):
            all_known = False
            break
    # Memoize on the class node itself.
    klass._all_bases_known = all_known
    return all_known
|
||||
|
||||
|
||||
def _type_check(type1, type2):
    """Return True when *type1* appears in *type2*'s MRO (excluding type2 itself)."""
    if not all(map(has_known_bases, (type1, type2))):
        raise _NonDeducibleTypeHierarchy

    newstyle_flags = (type1.newstyle, type2.newstyle)
    if not all(newstyle_flags):
        return False
    try:
        return type1 in type2.mro()[:-1]
    except MroError as e:
        # The MRO is invalid.
        raise _NonDeducibleTypeHierarchy from e
|
||||
|
||||
|
||||
def is_subtype(type1, type2):
    """Check if *type1* is a subtype of *type2*."""
    # Note the swapped order: subtype-ness of type1 means type1 is in
    # type2's position of the supertype check.
    return _type_check(type2, type1)
|
||||
|
||||
|
||||
def is_supertype(type1, type2):
    """Check if *type2* is a supertype of *type1*."""
    return _type_check(type1=type1, type2=type2)
|
||||
|
||||
|
||||
def class_instance_as_index(node):
    """Get the value as an index for the given instance.

    If an instance provides an __index__ method, then it can
    be used in some scenarios where an integer is expected,
    for instance when multiplying or subscripting a list.

    Returns the constant ``__index__`` result node, or None when no
    usable integer result can be inferred.
    """
    context = InferenceContext()
    try:
        for inferred in node.igetattr("__index__", context=context):
            if not isinstance(inferred, bases.BoundMethod):
                continue

            # Simulate calling ``node.__index__()`` with no arguments.
            context.boundnode = node
            context.callcontext = CallContext(args=[], callee=inferred)
            for result in inferred.infer_call_result(node, context=context):
                # Only an int constant is acceptable as an index.
                if isinstance(result, nodes.Const) and isinstance(result.value, int):
                    return result
    except InferenceError:
        pass
    return None
|
||||
|
||||
|
||||
def object_len(node, context=None):
    """Infer length of given node object

    :param Union[nodes.ClassDef, nodes.Instance] node:
    :param node: Node to infer length of

    :raises AstroidTypeError: If an invalid node is returned
        from __len__ method or no __len__ method exists
    :raises InferenceError: If the given node cannot be inferred
        or if multiple nodes are inferred or if the code executed in python
        would result in a infinite recursive check for length
    :rtype int: Integer length of node
    """
    # pylint: disable=import-outside-toplevel; circular import
    from astroid.objects import FrozenSet

    inferred_node = safe_infer(node, context=context)

    # prevent self referential length calls from causing a recursion error
    # see https://github.com/PyCQA/astroid/issues/777
    node_frame = node.frame(future=True)
    if (
        isinstance(node_frame, scoped_nodes.FunctionDef)
        and node_frame.name == "__len__"
        and hasattr(inferred_node, "_proxied")
        and inferred_node._proxied == node_frame.parent
    ):
        message = (
            "Self referential __len__ function will "
            "cause a RecursionError on line {} of {}".format(
                node.lineno, node.root().file
            )
        )
        raise InferenceError(message)

    if inferred_node is None or inferred_node is util.Uninferable:
        raise InferenceError(node=node)
    # Fast paths: literal containers and string/bytes constants have a
    # directly countable length.
    if isinstance(inferred_node, nodes.Const) and isinstance(
        inferred_node.value, (bytes, str)
    ):
        return len(inferred_node.value)
    if isinstance(inferred_node, (nodes.List, nodes.Set, nodes.Tuple, FrozenSet)):
        return len(inferred_node.elts)
    if isinstance(inferred_node, nodes.Dict):
        return len(inferred_node.items)

    # Otherwise fall back to calling the type's __len__.
    node_type = object_type(inferred_node, context=context)
    if not node_type:
        raise InferenceError(node=node)

    try:
        len_call = next(node_type.igetattr("__len__", context=context))
    except StopIteration as e:
        raise AstroidTypeError(str(e)) from e
    except AttributeInferenceError as e:
        raise AstroidTypeError(
            f"object of type '{node_type.pytype()}' has no len()"
        ) from e

    inferred = len_call.infer_call_result(node, context)
    if inferred is util.Uninferable:
        raise InferenceError(node=node, context=context)
    result_of_len = next(inferred, None)
    if (
        isinstance(result_of_len, nodes.Const)
        and result_of_len.pytype() == "builtins.int"
    ):
        return result_of_len.value
    if (
        result_of_len is None
        or isinstance(result_of_len, bases.Instance)
        and result_of_len.is_subtype_of("builtins.int")
    ):
        # Fake a result as we don't know the arguments of the instance call.
        return 0
    raise AstroidTypeError(
        f"'{result_of_len}' object cannot be interpreted as an integer"
    )
|
1080
astroid/inference.py
1080
astroid/inference.py
File diff suppressed because it is too large
Load Diff
|
@ -1,74 +0,0 @@
|
|||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""Transform utilities (filters and decorator)"""
|
||||
|
||||
import typing
|
||||
|
||||
import wrapt
|
||||
|
||||
from astroid.exceptions import InferenceOverwriteError
|
||||
from astroid.nodes import NodeNG
|
||||
|
||||
# Signature of an inference-tip function: (node, context, ...) -> iterator.
InferFn = typing.Callable[..., typing.Any]

# Cache of inference-tip results keyed by (tip function, node);
# cleared via clear_inference_tip_cache().
_cache: typing.Dict[typing.Tuple[InferFn, NodeNG], typing.Any] = {}
|
||||
|
||||
|
||||
def clear_inference_tip_cache():
    """Clear the inference tips cache."""
    _cache.clear()
|
||||
|
||||
|
||||
@wrapt.decorator
def _inference_tip_cached(func, instance, args, kwargs):
    """Cache decorator used for inference tips"""
    # The wrapped tip is called as func(node, ...); cache per (func, node)
    # so repeated inference of the same node reuses the materialized list.
    node = args[0]
    try:
        result = _cache[func, node]
    except KeyError:
        result = _cache[func, node] = list(func(*args, **kwargs))
    # Return a fresh iterator each time so the cached list is never consumed.
    return iter(result)
|
||||
|
||||
|
||||
def inference_tip(infer_function: InferFn, raise_on_overwrite: bool = False) -> InferFn:
    """Given an instance specific inference function, return a function to be
    given to AstroidManager().register_transform to set this inference function.

    :param bool raise_on_overwrite: Raise an `InferenceOverwriteError`
        if the inference tip will overwrite another. Used for debugging

    Typical usage

    .. sourcecode:: python

       AstroidManager().register_transform(Call, inference_tip(infer_named_tuple),
                                           predicate)

    .. Note::

        Using an inference tip will override
        any previously set inference tip for the given
        node. Use a predicate in the transform to prevent
        excess overwrites.
    """

    def transform(node: NodeNG, infer_function: InferFn = infer_function) -> NodeNG:
        # Guard against silently replacing a different, already-registered tip.
        if (
            raise_on_overwrite
            and node._explicit_inference is not None
            and node._explicit_inference is not infer_function
        ):
            raise InferenceOverwriteError(
                "Inference already set to {existing_inference}. "
                "Trying to overwrite with {new_inference} for {node}".format(
                    existing_inference=infer_function,
                    new_inference=node._explicit_inference,
                    node=node,
                )
            )
        # pylint: disable=no-value-for-parameter
        node._explicit_inference = _inference_tip_cached(infer_function)
        return node

    return transform
|
|
@ -1,369 +0,0 @@
|
|||
# Copyright (c) 2016-2018, 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
|
||||
# Copyright (c) 2017 Chris Philip <chrisp533@gmail.com>
|
||||
# Copyright (c) 2017 Hugo <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2017 ioanatia <ioanatia@users.noreply.github.com>
|
||||
# Copyright (c) 2017 Calen Pennington <cale@edx.org>
|
||||
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2019 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2020 Peter Kolbus <peter.kolbus@gmail.com>
|
||||
# Copyright (c) 2020 Raphael Gaschignard <raphael@rtpg.co>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Daniël van Noord <13665637+DanielNoord@users.noreply.github.com>
|
||||
# Copyright (c) 2021 DudeNr33 <3929834+DudeNr33@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
import abc
|
||||
import collections
|
||||
import enum
|
||||
import importlib.machinery
|
||||
import os
|
||||
import sys
|
||||
import zipimport
|
||||
from functools import lru_cache
|
||||
|
||||
from . import util
|
||||
|
||||
# The kinds of modules the finders below can report.
ModuleType = enum.Enum(
    "ModuleType",
    "C_BUILTIN C_EXTENSION PKG_DIRECTORY "
    "PY_CODERESOURCE PY_COMPILED PY_FROZEN PY_RESOURCE "
    "PY_SOURCE PY_ZIPMODULE PY_NAMESPACE",
)


# Underlying storage for ModuleSpec (see the class below).
_ModuleSpec = collections.namedtuple(
    "_ModuleSpec", "name type location " "origin submodule_search_locations"
)
|
||||
|
||||
|
||||
class ModuleSpec(_ModuleSpec):
    """Defines a class similar to PEP 420's ModuleSpec

    A module spec defines a name of a module, its type, location
    and where submodules can be found, if the module is a package.
    """

    def __new__(
        cls,
        name,
        module_type,
        location=None,
        origin=None,
        submodule_search_locations=None,
    ):
        # Map the keyword-friendly constructor onto the namedtuple fields
        # (``module_type`` is stored under the ``type`` field).
        return _ModuleSpec.__new__(
            cls,
            name=name,
            type=module_type,
            location=location,
            origin=origin,
            submodule_search_locations=submodule_search_locations,
        )
|
||||
|
||||
|
||||
class Finder:
    """A finder is a class which knows how to find a particular module."""

    def __init__(self, path=None):
        # Fall back to sys.path when no explicit search path is given.
        self._path = path or sys.path

    @abc.abstractmethod
    def find_module(self, modname, module_parts, processed, submodule_path):
        """Find the given module

        Each finder is responsible for each protocol of finding, as long as
        they all return a ModuleSpec.

        :param str modname: The module which needs to be searched.
        :param list module_parts: It should be a list of strings,
            where each part contributes to the module's
            namespace.
        :param list processed: What parts from the module parts were processed
            so far.
        :param list submodule_path: A list of paths where the module
            can be looked into.
        :returns: A ModuleSpec, describing how and where the module was found,
            None, otherwise.
        """

    def contribute_to_path(self, spec, processed):
        """Get a list of extra paths where this finder can search."""
|
||||
|
||||
class ImportlibFinder(Finder):
    """A finder based on the importlib module."""

    # (suffix, module type) pairs, checked in this order: C extensions,
    # then source files, then bytecode.
    _SUFFIXES = (
        [(s, ModuleType.C_EXTENSION) for s in importlib.machinery.EXTENSION_SUFFIXES]
        + [(s, ModuleType.PY_SOURCE) for s in importlib.machinery.SOURCE_SUFFIXES]
        + [(s, ModuleType.PY_COMPILED) for s in importlib.machinery.BYTECODE_SUFFIXES]
    )

    def find_module(self, modname, module_parts, processed, submodule_path):
        """Locate *modname* as a builtin/frozen module, a package directory,
        or a file with a recognised suffix, searching *submodule_path*
        (or sys.path when none is given)."""
        if not isinstance(modname, str):
            raise TypeError(f"'modname' must be a str, not {type(modname)}")
        if submodule_path is not None:
            submodule_path = list(submodule_path)
        else:
            # Top-level lookup: check builtin/frozen modules first via
            # importlib, then fall back to scanning sys.path.
            try:
                spec = importlib.util.find_spec(modname)
                if spec:
                    if spec.loader is importlib.machinery.BuiltinImporter:
                        return ModuleSpec(
                            name=modname,
                            location=None,
                            module_type=ModuleType.C_BUILTIN,
                        )
                    if spec.loader is importlib.machinery.FrozenImporter:
                        return ModuleSpec(
                            name=modname,
                            location=None,
                            module_type=ModuleType.PY_FROZEN,
                        )
            except ValueError:
                pass
            submodule_path = sys.path

        for entry in submodule_path:
            # A directory with an __init__.py (or compiled equivalent)
            # is a regular package.
            package_directory = os.path.join(entry, modname)
            for suffix in (".py", importlib.machinery.BYTECODE_SUFFIXES[0]):
                package_file_name = "__init__" + suffix
                file_path = os.path.join(package_directory, package_file_name)
                if os.path.isfile(file_path):
                    return ModuleSpec(
                        name=modname,
                        location=package_directory,
                        module_type=ModuleType.PKG_DIRECTORY,
                    )
            # Otherwise try each known module suffix in priority order.
            for suffix, type_ in ImportlibFinder._SUFFIXES:
                file_name = modname + suffix
                file_path = os.path.join(entry, file_name)
                if os.path.isfile(file_path):
                    return ModuleSpec(
                        name=modname, location=file_path, module_type=type_
                    )
        return None

    def contribute_to_path(self, spec, processed):
        """Return the search path contributed by *spec*, expanding
        setuptools-style namespace packages across sys.path."""
        if spec.location is None:
            # Builtin.
            return None

        if _is_setuptools_namespace(spec.location):
            # extend_path is called, search sys.path for module/packages
            # of this name see pkgutil.extend_path documentation
            path = [
                os.path.join(p, *processed)
                for p in sys.path
                if os.path.isdir(os.path.join(p, *processed))
            ]
        else:
            path = [spec.location]
        return path
|
||||
|
||||
|
||||
class ExplicitNamespacePackageFinder(ImportlibFinder):
    """A finder for the explicit namespace packages, generated through pkg_resources."""

    def find_module(self, modname, module_parts, processed, submodule_path):
        """Return a PY_NAMESPACE spec when *modname* is a registered
        namespace package already present in sys.modules."""
        if processed:
            # Rebuild the fully-qualified name from the already-processed parts.
            modname = ".".join(processed + [modname])
        if util.is_namespace(modname) and modname in sys.modules:
            submodule_path = sys.modules[modname].__path__
            return ModuleSpec(
                name=modname,
                location="",
                origin="namespace",
                module_type=ModuleType.PY_NAMESPACE,
                submodule_search_locations=submodule_path,
            )
        return None

    def contribute_to_path(self, spec, processed):
        """Namespace packages search their recorded submodule locations."""
        return spec.submodule_search_locations
|
||||
|
||||
|
||||
class ZipFinder(Finder):
    """Finder that knows how to find a module inside zip files."""

    def __init__(self, path):
        super().__init__(path)
        # Pre-populate sys.path_importer_cache with zipimporters for *path*.
        self._zipimporters = _precache_zipimporters(path)

    def find_module(self, modname, module_parts, processed, submodule_path):
        """Return an "egg"-origin spec when the module lives inside one of
        the cached zip archives, None otherwise."""
        try:
            file_type, filename, path = _search_zip(module_parts, self._zipimporters)
        except ImportError:
            return None

        return ModuleSpec(
            name=modname,
            location=filename,
            origin="egg",
            module_type=file_type,
            submodule_search_locations=path,
        )
|
||||
|
||||
|
||||
class PathSpecFinder(Finder):
    """Finder based on importlib.machinery.PathFinder."""

    def find_module(self, modname, module_parts, processed, submodule_path):
        """Delegate to importlib's PathFinder and translate its spec into
        this module's ModuleSpec (detecting namespace packages)."""
        spec = importlib.machinery.PathFinder.find_spec(modname, path=submodule_path)
        if spec:
            # origin can be either a string on older Python versions
            # or None in case it is a namespace package:
            # https://github.com/python/cpython/pull/5481
            is_namespace_pkg = spec.origin in {"namespace", None}
            location = spec.origin if not is_namespace_pkg else None
            module_type = ModuleType.PY_NAMESPACE if is_namespace_pkg else None
            spec = ModuleSpec(
                name=spec.name,
                location=location,
                origin=spec.origin,
                module_type=module_type,
                submodule_search_locations=list(spec.submodule_search_locations or []),
            )
        return spec

    def contribute_to_path(self, spec, processed):
        """Only namespace packages contribute extra search locations."""
        if spec.type == ModuleType.PY_NAMESPACE:
            return spec.submodule_search_locations
        return None
|
||||
|
||||
|
||||
# Finder classes tried in this order when resolving a module.
_SPEC_FINDERS = (
    ImportlibFinder,
    ZipFinder,
    PathSpecFinder,
    ExplicitNamespacePackageFinder,
)
|
||||
|
||||
|
||||
def _is_setuptools_namespace(location):
|
||||
try:
|
||||
with open(os.path.join(location, "__init__.py"), "rb") as stream:
|
||||
data = stream.read(4096)
|
||||
except OSError:
|
||||
return None
|
||||
else:
|
||||
extend_path = b"pkgutil" in data and b"extend_path" in data
|
||||
declare_namespace = (
|
||||
b"pkg_resources" in data and b"declare_namespace(__name__)" in data
|
||||
)
|
||||
return extend_path or declare_namespace
|
||||
|
||||
|
||||
@lru_cache()
|
||||
def _cached_set_diff(left, right):
|
||||
result = set(left)
|
||||
result.difference_update(right)
|
||||
return result
|
||||
|
||||
|
||||
def _precache_zipimporters(path=None):
    """
    For each path that has not been already cached
    in the sys.path_importer_cache, create a new zipimporter
    instance and add it into the cache.
    Return a dict associating all paths, stored in the cache, to corresponding
    zipimporter instances.

    :param path: paths that has to be added into the cache
    :return: association between paths stored in the cache and zipimporter instances
    """
    pic = sys.path_importer_cache

    # When measured, despite having the same complexity (O(n)),
    # converting to tuples and then caching the conversion to sets
    # and the set difference is faster than converting to sets
    # and then only caching the set difference.
    requested = tuple(path or sys.path)
    already_cached = tuple(pic)
    # pylint: disable=no-member
    for entry in _cached_set_diff(requested, already_cached):
        try:
            pic[entry] = zipimport.zipimporter(entry)
        except zipimport.ZipImportError:
            # Not a zip archive; leave it out of the cache.
            continue
    return {
        location: importer
        for location, importer in pic.items()
        if isinstance(importer, zipimport.zipimporter)
    }
|
||||
|
||||
|
||||
def _search_zip(modpath, pic):
|
||||
for filepath, importer in list(pic.items()):
|
||||
if importer is not None:
|
||||
found = importer.find_module(modpath[0])
|
||||
if found:
|
||||
if not importer.find_module(os.path.sep.join(modpath)):
|
||||
raise ImportError(
|
||||
"No module named %s in %s/%s"
|
||||
% (".".join(modpath[1:]), filepath, modpath)
|
||||
)
|
||||
# import code; code.interact(local=locals())
|
||||
return (
|
||||
ModuleType.PY_ZIPMODULE,
|
||||
os.path.abspath(filepath) + os.path.sep + os.path.sep.join(modpath),
|
||||
filepath,
|
||||
)
|
||||
raise ImportError(f"No module named {'.'.join(modpath)}")
|
||||
|
||||
|
||||
def _find_spec_with_path(search_path, modname, module_parts, processed, submodule_path):
    """Ask each spec finder in turn and return (finder, spec) for the first hit.

    Raises ImportError when no finder can locate the module.
    """
    # All finders are constructed up front on purpose: ZipFinder's
    # constructor primes sys.path_importer_cache as a side effect.
    finders = tuple(finder_cls(search_path) for finder_cls in _SPEC_FINDERS)
    for finder in finders:
        spec = finder.find_module(modname, module_parts, processed, submodule_path)
        if spec is not None:
            return finder, spec

    raise ImportError(f"No module named {'.'.join(module_parts)}")
|
||||
|
||||
|
||||
def find_spec(modpath, path=None):
    """Find a spec for the given module.

    :type modpath: list or tuple
    :param modpath:
      split module's name (i.e name of a module or package split
      on '.'), with leading empty strings for explicit relative import

    :type path: list or None
    :param path:
      optional list of path where the module or package should be
      searched (use sys.path if nothing or None is given)

    :rtype: ModuleSpec
    :return: A module spec, which describes how the module was
        found and where.
    """
    _path = path or sys.path

    # Need a copy for not mutating the argument.
    modpath = modpath[:]

    submodule_path = None
    module_parts = modpath[:]
    processed = []

    # Resolve each dotted component in turn; the search path for a component
    # is the submodule path contributed by its parent's finder.
    while modpath:
        modname = modpath.pop(0)
        finder, spec = _find_spec_with_path(
            _path, modname, module_parts, processed, submodule_path or path
        )
        processed.append(modname)
        if modpath:
            # More components remain: ask the finder where to look next.
            submodule_path = finder.contribute_to_path(spec, processed)

        if spec.type == ModuleType.PKG_DIRECTORY:
            spec = spec._replace(submodule_search_locations=submodule_path)

    # The spec of the last (innermost) component is the result.
    return spec
|
|
@ -1,17 +0,0 @@
|
|||
# Copyright (c) 2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Daniël van Noord <13665637+DanielNoord@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Neil Girdhar <mistersheik@gmail.com>
|
||||
|
||||
try:
|
||||
import pkg_resources
|
||||
except ImportError:
|
||||
pkg_resources = None # type: ignore[assignment]
|
||||
|
||||
|
||||
def is_namespace(modname):
    """Tell whether pkg_resources has *modname* registered as a namespace package."""
    if pkg_resources is None or not hasattr(pkg_resources, "_namespace_packages"):
        # pkg_resources unavailable (or too old): nothing is registered.
        return False
    return modname in pkg_resources._namespace_packages
|
|
@ -1,68 +0,0 @@
|
|||
# Copyright (c) 2016-2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""Contains logic for retrieving special methods.
|
||||
|
||||
This implementation does not rely on the dot attribute access
|
||||
logic, found in ``.getattr()``. The difference between these two
|
||||
is that the dunder methods are looked with the type slots
|
||||
(you can find more about these here
|
||||
http://lucumr.pocoo.org/2014/8/16/the-python-i-would-like-to-see/)
|
||||
As such, the lookup for the special methods is actually simpler than
|
||||
the dot attribute access.
|
||||
"""
|
||||
import itertools
|
||||
|
||||
import astroid
|
||||
from astroid.exceptions import AttributeInferenceError
|
||||
|
||||
|
||||
def _lookup_in_mro(node, name):
|
||||
attrs = node.locals.get(name, [])
|
||||
|
||||
nodes = itertools.chain.from_iterable(
|
||||
ancestor.locals.get(name, []) for ancestor in node.ancestors(recurs=True)
|
||||
)
|
||||
values = list(itertools.chain(attrs, nodes))
|
||||
if not values:
|
||||
raise AttributeInferenceError(attribute=name, target=node)
|
||||
|
||||
return values
|
||||
|
||||
|
||||
def lookup(node, name):
    """Lookup the given special method *name* in the given *node*.

    If the special method was found, then a list of attributes
    will be returned. Otherwise, `astroid.AttributeInferenceError`
    is going to be raised.
    """
    builtin_containers = (
        astroid.List,
        astroid.Tuple,
        astroid.Const,
        astroid.Dict,
        astroid.Set,
    )
    if isinstance(node, builtin_containers):
        return _builtin_lookup(node, name)
    if isinstance(node, astroid.Instance):
        return _lookup_in_mro(node, name)
    if isinstance(node, astroid.ClassDef):
        return _class_lookup(node, name)

    raise AttributeInferenceError(attribute=name, target=node)
|
||||
|
||||
|
||||
def _class_lookup(node, name):
    # Special methods on a class are looked up on its metaclass, mirroring
    # how the interpreter resolves type slots.
    metaclass = node.metaclass()
    if metaclass is None:
        raise AttributeInferenceError(attribute=name, target=node)

    return _lookup_in_mro(metaclass, name)
|
||||
|
||||
|
||||
def _builtin_lookup(node, name):
|
||||
values = node.locals.get(name, [])
|
||||
if not values:
|
||||
raise AttributeInferenceError(attribute=name, target=node)
|
||||
|
||||
return values
|
|
@ -1,857 +0,0 @@
|
|||
# Copyright (c) 2016-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
|
||||
# Copyright (c) 2017-2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2017 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2017 Calen Pennington <cale@edx.org>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Daniël van Noord <13665637+DanielNoord@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
"""
|
||||
Data object model, as per https://docs.python.org/3/reference/datamodel.html.
|
||||
|
||||
This module describes, at least partially, a data object model for some
|
||||
of astroid's nodes. The model contains special attributes that nodes such
|
||||
as functions, classes, modules etc have, such as __doc__, __class__,
|
||||
__module__ etc, being used when doing attribute lookups over nodes.
|
||||
|
||||
For instance, inferring `obj.__class__` will first trigger an inference
|
||||
of the `obj` variable. If it was successfully inferred, then an attribute
|
||||
`__class__ will be looked for in the inferred object. This is the part
|
||||
where the data model occurs. The model is attached to those nodes
|
||||
and the lookup mechanism will try to see if attributes such as
|
||||
`__class__` are defined by the model or not. If they are defined,
|
||||
the model will be requested to return the corresponding value of that
|
||||
attribute. Thus the model can be viewed as a special part of the lookup
|
||||
mechanism.
|
||||
"""
|
||||
|
||||
import itertools
|
||||
import os
|
||||
import pprint
|
||||
import types
|
||||
from functools import lru_cache
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
import astroid
|
||||
from astroid import util
|
||||
from astroid.context import InferenceContext, copy_context
|
||||
from astroid.exceptions import AttributeInferenceError, InferenceError, NoDefault
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.nodes import node_classes
|
||||
|
||||
objects = util.lazy_import("objects")
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from astroid.objects import Property
|
||||
|
||||
IMPL_PREFIX = "attr_"
|
||||
|
||||
|
||||
def _dunder_dict(instance, attributes):
    """Build an astroid Dict node mirroring *attributes*, for use as __dict__."""
    obj = node_classes.Dict(parent=instance)

    # Keys become Const string nodes.  Each attribute maps to a list of
    # assigned values; only the most recent (last) one is meaningful for
    # the special attribute's value.
    items = [
        (node_classes.Const(value=key, parent=obj), assigned[-1])
        for key, assigned in attributes.items()
    ]
    obj.postinit(items)
    return obj
|
||||
|
||||
|
||||
class ObjectModel:
    """Base class for the special-attribute models.

    A model is attached to a node class as a descriptor
    (``special_attributes = SomeObjectModel()``).  Accessing it through a
    node records that node as ``self._instance`` so the ``attr_*``
    properties can build their answers from it.
    """

    def __init__(self):
        self._instance = None

    def __repr__(self):
        # Render as "ModelName(attr1,\n          attr2, ...)" with the
        # attribute names aligned under the opening parenthesis.
        cname = type(self).__name__
        alignment = len(cname) + 1
        fields = (",\n" + " " * alignment).join(sorted(self.attributes()))
        return f"{cname}({fields})"

    def __call__(self, instance):
        self._instance = instance
        return self

    def __get__(self, instance, cls=None):
        # ObjectModel needs to be a descriptor so that just doing
        # `special_attributes = SomeObjectModel` should be enough in the body of a node.
        # But at the same time, node.special_attributes should return an object
        # which can be used for manipulating the special attributes. That's the reason
        # we pass the instance through which it got accessed to ObjectModel.__call__,
        # returning itself afterwards, so we can still have access to the
        # underlying data model and to the instance for which it got accessed.
        return self(instance)

    def __contains__(self, name):
        return name in self.attributes()

    def attributes(self):
        """Get the attributes which are exported by this object model."""
        # Cached per concrete model class rather than with lru_cache on the
        # method: an lru_cache keyed on `self` would keep every model
        # instance alive for the lifetime of the cache (flake8-bugbear B019).
        # The exported names depend only on the class's attr_* members, so a
        # class-level cache yields the same result.
        cls = type(self)
        cached = cls.__dict__.get("_attributes_cache")
        if cached is None:
            cached = [
                obj[len(IMPL_PREFIX) :]
                for obj in dir(self)
                if obj.startswith(IMPL_PREFIX)
            ]
            cls._attributes_cache = cached
        return cached

    def lookup(self, name):
        """Look up the given *name* in the current model

        It should return an AST or an interpreter object,
        but if the name is not found, then an AttributeInferenceError will be raised.
        """
        if name in self.attributes():
            return getattr(self, IMPL_PREFIX + name)
        raise AttributeInferenceError(target=self._instance, attribute=name)
|
||||
|
||||
|
||||
class ModuleModel(ObjectModel):
    """Model for the special attributes of Module nodes."""

    def _builtins(self):
        # The builtins module's own __dict__, reused for the `builtins` attribute.
        builtins_ast_module = AstroidManager().builtins_module
        return builtins_ast_module.special_attributes.lookup("__dict__")

    @property
    def attr_builtins(self):
        return self._builtins()

    @property
    def attr___path__(self):
        # Only packages have a __path__.
        if not self._instance.package:
            raise AttributeInferenceError(target=self._instance, attribute="__path__")

        # A package's __path__ entry is its directory, so strip a trailing
        # __init__.py from each stored path.
        path_objs = [
            node_classes.Const(
                value=path
                if not path.endswith("__init__.py")
                else os.path.dirname(path),
                parent=self._instance,
            )
            for path in self._instance.path
        ]

        container = node_classes.List(parent=self._instance)
        container.postinit(path_objs)

        return container

    @property
    def attr___name__(self):
        return node_classes.Const(value=self._instance.name, parent=self._instance)

    @property
    def attr___doc__(self):
        return node_classes.Const(value=self._instance.doc, parent=self._instance)

    @property
    def attr___file__(self):
        return node_classes.Const(value=self._instance.file, parent=self._instance)

    @property
    def attr___dict__(self):
        return _dunder_dict(self._instance, self._instance.globals)

    @property
    def attr___package__(self):
        # Empty string for plain modules, the module's own name for packages.
        if not self._instance.package:
            value = ""
        else:
            value = self._instance.name

        return node_classes.Const(value=value, parent=self._instance)

    # These are related to the Python 3 implementation of the
    # import system,
    # https://docs.python.org/3/reference/import.html#import-related-module-attributes

    @property
    def attr___spec__(self):
        # No handling for now.
        return node_classes.Unknown()

    @property
    def attr___loader__(self):
        # No handling for now.
        return node_classes.Unknown()

    @property
    def attr___cached__(self):
        # No handling for now.
        return node_classes.Unknown()
|
||||
|
||||
|
||||
class FunctionModel(ObjectModel):
    """Model for the special attributes of function objects."""

    @property
    def attr___name__(self):
        return node_classes.Const(value=self._instance.name, parent=self._instance)

    @property
    def attr___doc__(self):
        return node_classes.Const(value=self._instance.doc, parent=self._instance)

    @property
    def attr___qualname__(self):
        return node_classes.Const(value=self._instance.qname(), parent=self._instance)

    @property
    def attr___defaults__(self):
        # __defaults__ is None when the function takes no defaults,
        # otherwise a tuple of the default value nodes.
        func = self._instance
        if not func.args.defaults:
            return node_classes.Const(value=None, parent=func)

        defaults_obj = node_classes.Tuple(parent=func)
        defaults_obj.postinit(func.args.defaults)
        return defaults_obj

    @property
    def attr___annotations__(self):
        # Build the {name: annotation} dict from all parameter kinds plus
        # the return annotation, keeping only annotated entries.
        obj = node_classes.Dict(parent=self._instance)

        if not self._instance.returns:
            returns = None
        else:
            returns = self._instance.returns

        args = self._instance.args
        pair_annotations = itertools.chain(
            zip(args.args or [], args.annotations),
            zip(args.kwonlyargs, args.kwonlyargs_annotations),
            zip(args.posonlyargs or [], args.posonlyargs_annotations),
        )

        annotations = {
            arg.name: annotation for (arg, annotation) in pair_annotations if annotation
        }
        if args.varargannotation:
            annotations[args.vararg] = args.varargannotation
        if args.kwargannotation:
            annotations[args.kwarg] = args.kwargannotation
        if returns:
            annotations["return"] = returns

        items = [
            (node_classes.Const(key, parent=obj), value)
            for (key, value) in annotations.items()
        ]

        obj.postinit(items)
        return obj

    @property
    def attr___dict__(self):
        return node_classes.Dict(parent=self._instance)

    attr___globals__ = attr___dict__

    @property
    def attr___kwdefaults__(self):
        def _default_args(args, parent):
            # Yield (name_node, default) only for keyword-only parameters
            # that actually carry a default.
            for arg in args.kwonlyargs:
                try:
                    default = args.default_value(arg.name)
                except NoDefault:
                    continue

                name = node_classes.Const(arg.name, parent=parent)
                yield name, default

        args = self._instance.args
        obj = node_classes.Dict(parent=self._instance)
        defaults = dict(_default_args(args, obj))

        obj.postinit(list(defaults.items()))
        return obj

    @property
    def attr___module__(self):
        return node_classes.Const(self._instance.root().qname())

    @property
    def attr___get__(self):
        # pylint: disable=import-outside-toplevel; circular import
        from astroid import bases

        func = self._instance

        class DescriptorBoundMethod(bases.BoundMethod):
            """Bound method which knows how to understand calling descriptor binding."""

            def implicit_parameters(self):
                # Different than BoundMethod since the signature
                # is different.
                return 0

            def infer_call_result(self, caller, context=None):
                # __get__ takes (instance, owner=None): 1 or 2 arguments.
                if len(caller.args) > 2 or len(caller.args) < 1:
                    raise InferenceError(
                        "Invalid arguments for descriptor binding",
                        target=self,
                        context=context,
                    )

                context = copy_context(context)
                try:
                    cls = next(caller.args[0].infer(context=context))
                except StopIteration as e:
                    raise InferenceError(context=context, node=caller.args[0]) from e

                if cls is astroid.Uninferable:
                    raise InferenceError(
                        "Invalid class inferred", target=self, context=context
                    )

                # For some reason func is a Node that the below
                # code is not expecting
                if isinstance(func, bases.BoundMethod):
                    yield func
                    return

                # Rebuild the original value, but with the parent set as the
                # class where it will be bound.
                new_func = func.__class__(
                    name=func.name,
                    doc=func.doc,
                    lineno=func.lineno,
                    col_offset=func.col_offset,
                    parent=func.parent,
                )
                # pylint: disable=no-member
                new_func.postinit(func.args, func.body, func.decorators, func.returns)

                # Build a proper bound method that points to our newly built function.
                proxy = bases.UnboundMethod(new_func)
                yield bases.BoundMethod(proxy=proxy, bound=cls)

            @property
            def args(self):
                """Overwrite the underlying args to match those of the underlying func

                Usually the underlying *func* is a function/method, as in:

                    def test(self):
                        pass

                This has only the *self* parameter but when we access test.__get__
                we get a new object which has two parameters, *self* and *type*.
                """
                nonlocal func
                positional_or_keyword_params = func.args.args.copy()
                positional_or_keyword_params.append(astroid.AssignName(name="type"))

                positional_only_params = func.args.posonlyargs.copy()

                arguments = astroid.Arguments(parent=func.args.parent)
                arguments.postinit(
                    args=positional_or_keyword_params,
                    posonlyargs=positional_only_params,
                    defaults=[],
                    kwonlyargs=[],
                    kw_defaults=[],
                    annotations=[],
                )
                return arguments

        return DescriptorBoundMethod(proxy=self._instance, bound=self._instance)

    # These are here just for completion.
    @property
    def attr___ne__(self):
        return node_classes.Unknown()

    attr___subclasshook__ = attr___ne__
    attr___str__ = attr___ne__
    attr___sizeof__ = attr___ne__
    attr___setattr___ = attr___ne__
    attr___repr__ = attr___ne__
    attr___reduce__ = attr___ne__
    attr___reduce_ex__ = attr___ne__
    attr___new__ = attr___ne__
    attr___lt__ = attr___ne__
    attr___eq__ = attr___ne__
    attr___gt__ = attr___ne__
    attr___format__ = attr___ne__
    attr___delattr___ = attr___ne__
    attr___getattribute__ = attr___ne__
    attr___hash__ = attr___ne__
    attr___init__ = attr___ne__
    attr___dir__ = attr___ne__
    attr___call__ = attr___ne__
    attr___class__ = attr___ne__
    attr___closure__ = attr___ne__
    attr___code__ = attr___ne__
|
||||
|
||||
|
||||
class ClassModel(ObjectModel):
    """Model for the special attributes of ClassDef nodes."""

    def __init__(self):
        # Add a context so that inferences called from an instance don't recurse endlessly
        self.context = InferenceContext()

        super().__init__()

    @property
    def attr___module__(self):
        return node_classes.Const(self._instance.root().qname())

    @property
    def attr___name__(self):
        return node_classes.Const(self._instance.name)

    @property
    def attr___qualname__(self):
        return node_classes.Const(self._instance.qname())

    @property
    def attr___doc__(self):
        return node_classes.Const(self._instance.doc)

    @property
    def attr___mro__(self):
        # Only new-style classes have an __mro__.
        if not self._instance.newstyle:
            raise AttributeInferenceError(target=self._instance, attribute="__mro__")

        mro = self._instance.mro()
        obj = node_classes.Tuple(parent=self._instance)
        obj.postinit(mro)
        return obj

    @property
    def attr_mro(self):
        if not self._instance.newstyle:
            raise AttributeInferenceError(target=self._instance, attribute="mro")

        # pylint: disable=import-outside-toplevel; circular import
        from astroid import bases

        other_self = self

        # Cls.mro is a method and we need to return one in order to have a proper inference.
        # The method we're returning is capable of inferring the underlying MRO though.
        class MroBoundMethod(bases.BoundMethod):
            def infer_call_result(self, caller, context=None):
                yield other_self.attr___mro__

        implicit_metaclass = self._instance.implicit_metaclass()
        mro_method = implicit_metaclass.locals["mro"][0]
        return MroBoundMethod(proxy=mro_method, bound=implicit_metaclass)

    @property
    def attr___bases__(self):
        obj = node_classes.Tuple()
        context = InferenceContext()
        elts = list(self._instance._inferred_bases(context))
        obj.postinit(elts=elts)
        return obj

    @property
    def attr___class__(self):
        # pylint: disable=import-outside-toplevel; circular import
        from astroid import helpers

        return helpers.object_type(self._instance)

    @property
    def attr___subclasses__(self):
        """Get the subclasses of the underlying class

        This looks only in the current module for retrieving the subclasses,
        thus it might miss a couple of them.
        """
        # pylint: disable=import-outside-toplevel; circular import
        from astroid import bases
        from astroid.nodes import scoped_nodes

        if not self._instance.newstyle:
            raise AttributeInferenceError(
                target=self._instance, attribute="__subclasses__"
            )

        qname = self._instance.qname()
        root = self._instance.root()
        # Scan the whole module for classes that subtype this one.
        classes = [
            cls
            for cls in root.nodes_of_class(scoped_nodes.ClassDef)
            if cls != self._instance and cls.is_subtype_of(qname, context=self.context)
        ]

        obj = node_classes.List(parent=self._instance)
        obj.postinit(classes)

        class SubclassesBoundMethod(bases.BoundMethod):
            def infer_call_result(self, caller, context=None):
                yield obj

        implicit_metaclass = self._instance.implicit_metaclass()
        subclasses_method = implicit_metaclass.locals["__subclasses__"][0]
        return SubclassesBoundMethod(proxy=subclasses_method, bound=implicit_metaclass)

    @property
    def attr___dict__(self):
        return node_classes.Dict(parent=self._instance)
|
||||
|
||||
|
||||
class SuperModel(ObjectModel):
    """Model for the special attributes of super() objects."""

    @property
    def attr___thisclass__(self):
        return self._instance.mro_pointer

    @property
    def attr___self_class__(self):
        return self._instance._self_class

    @property
    def attr___self__(self):
        return self._instance.type

    @property
    def attr___class__(self):
        return self._instance._proxied
|
||||
|
||||
|
||||
class UnboundMethodModel(ObjectModel):
    """Model for unbound methods, including the legacy ``im_*`` aliases."""

    @property
    def attr___class__(self):
        # pylint: disable=import-outside-toplevel; circular import
        from astroid import helpers

        return helpers.object_type(self._instance)

    @property
    def attr___func__(self):
        return self._instance._proxied

    @property
    def attr___self__(self):
        # Unbound methods are not attached to any instance.
        return node_classes.Const(value=None, parent=self._instance)

    attr_im_func = attr___func__
    attr_im_class = attr___class__
    attr_im_self = attr___self__
|
||||
|
||||
|
||||
class BoundMethodModel(FunctionModel):
    """Model for bound methods: everything a function has, plus __func__/__self__."""

    @property
    def attr___func__(self):
        # The wrapped function sits behind two proxy layers.
        return self._instance._proxied._proxied

    @property
    def attr___self__(self):
        return self._instance.bound
|
||||
|
||||
|
||||
class GeneratorModel(FunctionModel):
    """Model for generator instances; mirrors the builtin generator's members."""

    def __new__(cls, *args, **kwargs):
        # Append the values from the GeneratorType unto this object.
        ret = super().__new__(cls, *args, **kwargs)
        generator = AstroidManager().builtins_module["generator"]
        for name, values in generator.locals.items():
            method = values[0]

            # `meth=method` freezes the current value; a plain closure would
            # late-bind to the last method of the loop.
            def patched(cls, meth=method):
                return meth

            setattr(type(ret), IMPL_PREFIX + name, property(patched))

        return ret

    @property
    def attr___name__(self):
        # A generator reports the name/doc of the function that created it.
        return node_classes.Const(
            value=self._instance.parent.name, parent=self._instance
        )

    @property
    def attr___doc__(self):
        return node_classes.Const(
            value=self._instance.parent.doc, parent=self._instance
        )
|
||||
|
||||
|
||||
class AsyncGeneratorModel(GeneratorModel):
    """Model for async generator instances."""

    def __new__(cls, *args, **kwargs):
        # Append the values from the AGeneratorType unto this object.
        ret = super().__new__(cls, *args, **kwargs)
        astroid_builtins = AstroidManager().builtins_module
        generator = astroid_builtins.get("async_generator")
        if generator is None:
            # Make it backward compatible.
            generator = astroid_builtins.get("generator")

        for name, values in generator.locals.items():
            method = values[0]

            # Bind the current method as a default to avoid late binding.
            def patched(cls, meth=method):
                return meth

            setattr(type(ret), IMPL_PREFIX + name, property(patched))

        return ret
|
||||
|
||||
|
||||
class InstanceModel(ObjectModel):
    """Model for the special attributes of plain class instances."""

    @property
    def attr___class__(self):
        return self._instance._proxied

    @property
    def attr___module__(self):
        return node_classes.Const(self._instance.root().qname())

    @property
    def attr___doc__(self):
        return node_classes.Const(self._instance.doc)

    @property
    def attr___dict__(self):
        return _dunder_dict(self._instance, self._instance.instance_attrs)
|
||||
|
||||
|
||||
# Exception instances
|
||||
|
||||
|
||||
# Exception instances


class ExceptionInstanceModel(InstanceModel):
    """Model for instances of exceptions (args, __traceback__)."""

    @property
    def attr_args(self):
        # The concrete message is unknown statically; model args as a
        # one-element tuple holding an empty string.
        message = node_classes.Const("")
        args = node_classes.Tuple(parent=self._instance)
        args.postinit((message,))
        return args

    @property
    def attr___traceback__(self):
        builtins_ast_module = AstroidManager().builtins_module
        traceback_type = builtins_ast_module[types.TracebackType.__name__]
        return traceback_type.instantiate_class()
|
||||
|
||||
|
||||
class SyntaxErrorInstanceModel(ExceptionInstanceModel):
    """Model for SyntaxError instances (adds ``text``)."""

    @property
    def attr_text(self):
        # Placeholder value; the real text is unknown statically.
        return node_classes.Const("")
|
||||
|
||||
|
||||
class OSErrorInstanceModel(ExceptionInstanceModel):
    """Model for OSError instances (filename, errno, strerror, filename2)."""

    @property
    def attr_filename(self):
        return node_classes.Const("")

    @property
    def attr_errno(self):
        return node_classes.Const(0)

    @property
    def attr_strerror(self):
        return node_classes.Const("")

    attr_filename2 = attr_filename
|
||||
|
||||
|
||||
class ImportErrorInstanceModel(ExceptionInstanceModel):
    """Model for ImportError instances (name, path)."""

    @property
    def attr_name(self):
        return node_classes.Const("")

    @property
    def attr_path(self):
        return node_classes.Const("")
|
||||
|
||||
|
||||
class UnicodeDecodeErrorInstanceModel(ExceptionInstanceModel):
    """Model for UnicodeDecodeError instances (adds ``object``)."""

    @property
    def attr_object(self):
        return node_classes.Const("")
|
||||
|
||||
|
||||
# Maps a builtin exception's qualified name to the model used for its
# instances; exceptions not listed here fall back to ExceptionInstanceModel.
BUILTIN_EXCEPTIONS = {
    "builtins.SyntaxError": SyntaxErrorInstanceModel,
    "builtins.ImportError": ImportErrorInstanceModel,
    "builtins.UnicodeDecodeError": UnicodeDecodeErrorInstanceModel,
    # These are all similar to OSError in terms of attributes
    "builtins.OSError": OSErrorInstanceModel,
    "builtins.BlockingIOError": OSErrorInstanceModel,
    "builtins.BrokenPipeError": OSErrorInstanceModel,
    "builtins.ChildProcessError": OSErrorInstanceModel,
    "builtins.ConnectionAbortedError": OSErrorInstanceModel,
    "builtins.ConnectionError": OSErrorInstanceModel,
    "builtins.ConnectionRefusedError": OSErrorInstanceModel,
    "builtins.ConnectionResetError": OSErrorInstanceModel,
    "builtins.FileExistsError": OSErrorInstanceModel,
    "builtins.FileNotFoundError": OSErrorInstanceModel,
    "builtins.InterruptedError": OSErrorInstanceModel,
    "builtins.IsADirectoryError": OSErrorInstanceModel,
    "builtins.NotADirectoryError": OSErrorInstanceModel,
    "builtins.PermissionError": OSErrorInstanceModel,
    "builtins.ProcessLookupError": OSErrorInstanceModel,
    "builtins.TimeoutError": OSErrorInstanceModel,
}
|
||||
|
||||
|
||||
class DictModel(ObjectModel):
    """Model for dict instances (items/keys/values view methods)."""

    @property
    def attr___class__(self):
        return self._instance._proxied

    def _generic_dict_attribute(self, obj, name):
        """Generate a bound method that can infer the given *obj*."""

        class DictMethodBoundMethod(astroid.BoundMethod):
            def infer_call_result(self, caller, context=None):
                yield obj

        meth = next(self._instance._proxied.igetattr(name), None)
        return DictMethodBoundMethod(proxy=meth, bound=self._instance)

    @property
    def attr_items(self):
        # Each (key, value) pair becomes a Tuple node inside a List,
        # wrapped into a DictItems view object.
        elems = []
        obj = node_classes.List(parent=self._instance)
        for key, value in self._instance.items:
            elem = node_classes.Tuple(parent=obj)
            elem.postinit((key, value))
            elems.append(elem)
        obj.postinit(elts=elems)

        obj = objects.DictItems(obj)
        return self._generic_dict_attribute(obj, "items")

    @property
    def attr_keys(self):
        keys = [key for (key, _) in self._instance.items]
        obj = node_classes.List(parent=self._instance)
        obj.postinit(elts=keys)

        obj = objects.DictKeys(obj)
        return self._generic_dict_attribute(obj, "keys")

    @property
    def attr_values(self):
        values = [value for (_, value) in self._instance.items]
        obj = node_classes.List(parent=self._instance)
        obj.postinit(values)

        obj = objects.DictValues(obj)
        return self._generic_dict_attribute(obj, "values")
|
||||
|
||||
|
||||
class PropertyModel(ObjectModel):
    """Model for a builtin property"""

    # pylint: disable=import-outside-toplevel
    def _init_function(self, name):
        """Build an empty FunctionDef named *name*, parented to the property.

        Used to model the ``getter``/``setter``/``deleter`` decorator
        attributes, which only need to exist as callables.
        """
        from astroid.nodes.node_classes import Arguments
        from astroid.nodes.scoped_nodes import FunctionDef

        args = Arguments()
        args.postinit(
            args=[],
            defaults=[],
            kwonlyargs=[],
            kw_defaults=[],
            annotations=[],
            posonlyargs=[],
            posonlyargs_annotations=[],
            kwonlyargs_annotations=[],
        )

        function = FunctionDef(name=name, parent=self._instance)

        function.postinit(args=args, body=[])
        return function

    @property
    def attr_fget(self):
        """Model ``prop.fget``: calling it infers the wrapped getter's result."""
        from astroid.nodes.scoped_nodes import FunctionDef

        func = self._instance

        class PropertyFuncAccessor(FunctionDef):
            def infer_call_result(self, caller=None, context=None):
                nonlocal func
                # fget takes exactly one argument (the instance).
                if caller and len(caller.args) != 1:
                    raise InferenceError(
                        "fget() needs a single argument", target=self, context=context
                    )

                yield from func.function.infer_call_result(
                    caller=caller, context=context
                )

        property_accessor = PropertyFuncAccessor(name="fget", parent=self._instance)
        property_accessor.postinit(args=func.args, body=func.body)
        return property_accessor

    @property
    def attr_fset(self):
        """Model ``prop.fset``: calling it infers the matching setter's result.

        Raises InferenceError if no ``@<name>.setter``-decorated function is
        found next to the property.
        """
        from astroid.nodes.scoped_nodes import FunctionDef

        func = self._instance

        def find_setter(func: "Property") -> Optional[astroid.FunctionDef]:
            """
            Given a property, find the corresponding setter function and returns it.

            :param func: property for which the setter has to be found
            :return: the setter function or None
            """
            # NOTE(review): accesses ``t.name`` on every sibling child node;
            # presumably all relevant children have a name attribute here --
            # confirm a nameless sibling cannot raise AttributeError.
            for target in [
                t for t in func.parent.get_children() if t.name == func.function.name
            ]:
                for dec_name in target.decoratornames():
                    if dec_name.endswith(func.function.name + ".setter"):
                        return target
            return None

        func_setter = find_setter(func)
        if not func_setter:
            raise InferenceError(
                f"Unable to find the setter of property {func.function.name}"
            )

        class PropertyFuncAccessor(FunctionDef):
            def infer_call_result(self, caller=None, context=None):
                nonlocal func_setter
                # fset takes exactly two arguments (the instance and the value).
                if caller and len(caller.args) != 2:
                    raise InferenceError(
                        "fset() needs two arguments", target=self, context=context
                    )
                yield from func_setter.infer_call_result(caller=caller, context=context)

        property_accessor = PropertyFuncAccessor(name="fset", parent=self._instance)
        property_accessor.postinit(args=func_setter.args, body=func_setter.body)
        return property_accessor

    @property
    def attr_setter(self):
        # Decorator attribute: an empty callable is a sufficient model.
        return self._init_function("setter")

    @property
    def attr_deleter(self):
        return self._init_function("deleter")

    @property
    def attr_getter(self):
        return self._init_function("getter")

    # pylint: enable=import-outside-toplevel
|
|
@ -1,375 +0,0 @@
|
|||
# Copyright (c) 2006-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2014-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 BioGeek <jeroen.vangoey@gmail.com>
|
||||
# Copyright (c) 2014 Google, Inc.
|
||||
# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
|
||||
# Copyright (c) 2017 Iva Miholic <ivamiho@gmail.com>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2019 Raphael Gaschignard <raphael@makeleaps.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2020 Raphael Gaschignard <raphael@rtpg.co>
|
||||
# Copyright (c) 2020 Anubhav <35621759+anubh-v@users.noreply.github.com>
|
||||
# Copyright (c) 2020 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Daniël van Noord <13665637+DanielNoord@users.noreply.github.com>
|
||||
# Copyright (c) 2021 grayjk <grayjk@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Andrew Haigh <hello@nelf.in>
|
||||
# Copyright (c) 2021 DudeNr33 <3929834+DudeNr33@users.noreply.github.com>
|
||||
# Copyright (c) 2021 pre-commit-ci[bot] <bot@noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""astroid manager: avoid multiple astroid build of a same module when
|
||||
possible by providing a class responsible to get astroid representation
|
||||
from various source and using a cache of built modules)
|
||||
"""
|
||||
|
||||
import os
|
||||
import types
|
||||
import zipimport
|
||||
from typing import TYPE_CHECKING, ClassVar, List, Optional
|
||||
|
||||
from astroid.exceptions import AstroidBuildingError, AstroidImportError
|
||||
from astroid.interpreter._import import spec
|
||||
from astroid.modutils import (
|
||||
NoSourceFile,
|
||||
file_info_from_modpath,
|
||||
get_source_file,
|
||||
is_module_name_part_of_extension_package_whitelist,
|
||||
is_python_source,
|
||||
is_standard_module,
|
||||
load_module_from_name,
|
||||
modpath_from_file,
|
||||
)
|
||||
from astroid.transforms import TransformVisitor
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from astroid import nodes
|
||||
|
||||
ZIP_IMPORT_EXTS = (".zip", ".egg", ".whl", ".pyz", ".pyzw")
|
||||
|
||||
|
||||
def safe_repr(obj):
    """Return ``repr(obj)``, or ``"???"`` when computing the repr raises."""
    try:
        result = repr(obj)
    except Exception:  # pylint: disable=broad-except
        # A broken __repr__ must never break error reporting.
        result = "???"
    return result
|
||||
|
||||
|
||||
class AstroidManager:
    """Responsible to build astroid from files or modules.

    Use the Borg (singleton) pattern.
    """

    name = "astroid loader"
    # Shared state for the Borg pattern: every instance aliases its
    # __dict__ to this class-level dict.
    brain = {}
    max_inferable_values: ClassVar[int] = 100

    def __init__(self):
        # Borg pattern: all instances share the same attribute dict, so the
        # caches below are only initialized by the very first instance.
        self.__dict__ = AstroidManager.brain
        if not self.__dict__:
            # NOTE: cache entries are added by the [re]builder
            self.astroid_cache = {}
            self._mod_file_cache = {}
            self._failed_import_hooks = []
            self.always_load_extensions = False
            self.optimize_ast = False
            self.extension_package_whitelist = set()
            self._transform = TransformVisitor()

    @property
    def register_transform(self):
        # This and unregister_transform below are exported for convenience
        return self._transform.register_transform

    @property
    def unregister_transform(self):
        return self._transform.unregister_transform

    @property
    def builtins_module(self):
        # The builtins AST is placed in the cache by bootstrap().
        return self.astroid_cache["builtins"]

    def visit_transforms(self, node):
        """Visit the transforms and apply them to the given *node*."""
        return self._transform.visit(node)

    def ast_from_file(self, filepath, modname=None, fallback=True, source=False):
        """given a module name, return the astroid object"""
        try:
            # Prefer the .py source next to a compiled file when available.
            filepath = get_source_file(filepath, include_no_ext=True)
            source = True
        except NoSourceFile:
            pass
        if modname is None:
            try:
                modname = ".".join(modpath_from_file(filepath))
            except ImportError:
                # Fall back to the file path as the module "name".
                modname = filepath
        if (
            modname in self.astroid_cache
            and self.astroid_cache[modname].file == filepath
        ):
            return self.astroid_cache[modname]
        if source:
            # pylint: disable=import-outside-toplevel; circular import
            from astroid.builder import AstroidBuilder

            return AstroidBuilder(self).file_build(filepath, modname)
        if fallback and modname:
            return self.ast_from_module_name(modname)
        raise AstroidBuildingError("Unable to build an AST for {path}.", path=filepath)

    def ast_from_string(self, data, modname="", filepath=None):
        """Given some source code as a string, return its corresponding astroid object"""
        # pylint: disable=import-outside-toplevel; circular import
        from astroid.builder import AstroidBuilder

        return AstroidBuilder(self).string_build(data, modname, filepath)

    def _build_stub_module(self, modname):
        """Build an empty module AST as a stand-in for *modname*."""
        # pylint: disable=import-outside-toplevel; circular import
        from astroid.builder import AstroidBuilder

        return AstroidBuilder(self).string_build("", modname)

    def _build_namespace_module(self, modname: str, path: List[str]) -> "nodes.Module":
        """Build a module AST for a PEP 420 namespace package."""
        # pylint: disable=import-outside-toplevel; circular import
        from astroid.builder import build_namespace_package_module

        return build_namespace_package_module(modname, path)

    def _can_load_extension(self, modname: str) -> bool:
        """Return True if importing the C extension *modname* is allowed."""
        if self.always_load_extensions:
            return True
        if is_standard_module(modname):
            return True
        return is_module_name_part_of_extension_package_whitelist(
            modname, self.extension_package_whitelist
        )

    def ast_from_module_name(self, modname, context_file=None):
        """given a module name, return the astroid object"""
        if modname in self.astroid_cache:
            return self.astroid_cache[modname]
        if modname == "__main__":
            return self._build_stub_module(modname)
        if context_file:
            # Resolve relative imports from the context file's directory;
            # restored in the finally block below.
            old_cwd = os.getcwd()
            os.chdir(os.path.dirname(context_file))
        try:
            found_spec = self.file_from_module_name(modname, context_file)
            if found_spec.type == spec.ModuleType.PY_ZIPMODULE:
                module = self.zip_import_data(found_spec.location)
                if module is not None:
                    return module

            elif found_spec.type in (
                spec.ModuleType.C_BUILTIN,
                spec.ModuleType.C_EXTENSION,
            ):
                if (
                    found_spec.type == spec.ModuleType.C_EXTENSION
                    and not self._can_load_extension(modname)
                ):
                    return self._build_stub_module(modname)
                try:
                    # C modules must actually be imported to be introspected.
                    module = load_module_from_name(modname)
                except Exception as e:
                    raise AstroidImportError(
                        "Loading {modname} failed with:\n{error}",
                        modname=modname,
                        path=found_spec.location,
                    ) from e
                return self.ast_from_module(module, modname)

            elif found_spec.type == spec.ModuleType.PY_COMPILED:
                raise AstroidImportError(
                    "Unable to load compiled module {modname}.",
                    modname=modname,
                    path=found_spec.location,
                )

            elif found_spec.type == spec.ModuleType.PY_NAMESPACE:
                return self._build_namespace_module(
                    modname, found_spec.submodule_search_locations
                )
            elif found_spec.type == spec.ModuleType.PY_FROZEN:
                return self._build_stub_module(modname)

            if found_spec.location is None:
                raise AstroidImportError(
                    "Can't find a file for module {modname}.", modname=modname
                )

            return self.ast_from_file(found_spec.location, modname, fallback=False)
        except AstroidBuildingError as e:
            # Give registered failed-import hooks a chance to resolve it.
            for hook in self._failed_import_hooks:
                try:
                    return hook(modname)
                except AstroidBuildingError:
                    pass
            raise e
        finally:
            if context_file:
                os.chdir(old_cwd)

    def zip_import_data(self, filepath):
        """Build a module AST from a source file inside a zip/egg/wheel archive."""
        if zipimport is None:
            return None

        # pylint: disable=import-outside-toplevel; circular import
        from astroid.builder import AstroidBuilder

        builder = AstroidBuilder(self)
        for ext in ZIP_IMPORT_EXTS:
            try:
                # Split "<archive><ext>/<resource>" into archive and resource.
                eggpath, resource = filepath.rsplit(ext + os.path.sep, 1)
            except ValueError:
                continue
            try:
                # pylint: disable-next=no-member
                importer = zipimport.zipimporter(eggpath + ext)
                zmodname = resource.replace(os.path.sep, ".")
                if importer.is_package(resource):
                    zmodname = zmodname + ".__init__"
                module = builder.string_build(
                    importer.get_source(resource), zmodname, filepath
                )
                return module
            except Exception:  # pylint: disable=broad-except
                # Best effort: try the next archive extension.
                continue
        return None

    def file_from_module_name(self, modname, contextfile):
        """Return the file spec for *modname*, caching both hits and failures."""
        try:
            value = self._mod_file_cache[(modname, contextfile)]
        except KeyError:
            try:
                value = file_info_from_modpath(
                    modname.split("."), context_file=contextfile
                )
            except ImportError as e:
                # Cache the failure too, as an exception instance to re-raise.
                value = AstroidImportError(
                    "Failed to import module {modname} with error:\n{error}.",
                    modname=modname,
                    # we remove the traceback here to save on memory usage (since these exceptions are cached)
                    error=e.with_traceback(None),
                )
            self._mod_file_cache[(modname, contextfile)] = value
        if isinstance(value, AstroidBuildingError):
            # we remove the traceback here to save on memory usage (since these exceptions are cached)
            raise value.with_traceback(None)
        return value

    def ast_from_module(self, module: types.ModuleType, modname: Optional[str] = None):
        """given an imported module, return the astroid object"""
        modname = modname or module.__name__
        if modname in self.astroid_cache:
            return self.astroid_cache[modname]
        try:
            # some builtin modules don't have __file__ attribute
            filepath = module.__file__
            if is_python_source(filepath):
                return self.ast_from_file(filepath, modname)
        except AttributeError:
            pass

        # pylint: disable=import-outside-toplevel; circular import
        from astroid.builder import AstroidBuilder

        return AstroidBuilder(self).module_build(module, modname)

    def ast_from_class(self, klass, modname=None):
        """get astroid for the given class"""
        if modname is None:
            try:
                modname = klass.__module__
            except AttributeError as exc:
                # NOTE(review): the message names {class_name} but only cls,
                # class_repr and modname are supplied -- confirm the intended
                # placeholder (likely {class_repr}).
                raise AstroidBuildingError(
                    "Unable to get module for class {class_name}.",
                    cls=klass,
                    class_repr=safe_repr(klass),
                    modname=modname,
                ) from exc
        modastroid = self.ast_from_module_name(modname)
        return modastroid.getattr(klass.__name__)[0]  # XXX

    def infer_ast_from_something(self, obj, context=None):
        """infer astroid for the given class"""
        # Work on the class: for instances, use their type.
        if hasattr(obj, "__class__") and not isinstance(obj, type):
            klass = obj.__class__
        else:
            klass = obj
        try:
            modname = klass.__module__
        except AttributeError as exc:
            raise AstroidBuildingError(
                "Unable to get module for {class_repr}.",
                cls=klass,
                class_repr=safe_repr(klass),
            ) from exc
        except Exception as exc:
            raise AstroidImportError(
                "Unexpected error while retrieving module for {class_repr}:\n"
                "{error}",
                cls=klass,
                class_repr=safe_repr(klass),
            ) from exc
        try:
            name = klass.__name__
        except AttributeError as exc:
            raise AstroidBuildingError(
                "Unable to get name for {class_repr}:\n",
                cls=klass,
                class_repr=safe_repr(klass),
            ) from exc
        except Exception as exc:
            raise AstroidImportError(
                "Unexpected error while retrieving name for {class_repr}:\n" "{error}",
                cls=klass,
                class_repr=safe_repr(klass),
            ) from exc
        # take care, on living object __module__ is regularly wrong :(
        modastroid = self.ast_from_module_name(modname)
        if klass is obj:
            for inferred in modastroid.igetattr(name, context):
                yield inferred
        else:
            # For instances, yield instantiated class nodes instead.
            for inferred in modastroid.igetattr(name, context):
                yield inferred.instantiate_class()

    def register_failed_import_hook(self, hook):
        """Registers a hook to resolve imports that cannot be found otherwise.

        `hook` must be a function that accepts a single argument `modname` which
        contains the name of the module or package that could not be imported.
        If `hook` can resolve the import, must return a node of type `astroid.Module`,
        otherwise, it must raise `AstroidBuildingError`.
        """
        self._failed_import_hooks.append(hook)

    def cache_module(self, module):
        """Cache a module if no module with the same name is known yet."""
        self.astroid_cache.setdefault(module.name, module)

    def bootstrap(self):
        """Bootstrap the required AST modules needed for the manager to work

        The bootstrap usually involves building the AST for the builtins
        module, which is required by the rest of astroid to work correctly.
        """
        from astroid import raw_building  # pylint: disable=import-outside-toplevel

        raw_building._astroid_bootstrapping()

    def clear_cache(self):
        """Clear the underlying cache. Also bootstraps the builtins module."""
        self.astroid_cache.clear()
        self.bootstrap()
|
|
@ -1,169 +0,0 @@
|
|||
# Copyright (c) 2010-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2014-2016, 2018 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Google, Inc.
|
||||
# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
|
||||
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
|
||||
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Daniël van Noord <13665637+DanielNoord@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
# Copyright (c) 2021 pre-commit-ci[bot] <bot@noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""This module contains some mixins for the different nodes.
|
||||
"""
|
||||
import itertools
|
||||
from typing import TYPE_CHECKING, Optional
|
||||
|
||||
from astroid import decorators
|
||||
from astroid.exceptions import AttributeInferenceError
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from astroid import nodes
|
||||
|
||||
|
||||
class BlockRangeMixIn:
    """Override block line-number range computation for multi-clause nodes."""

    @decorators.cachedproperty
    def blockstart_tolineno(self):
        # The block header ends on the statement's own line.
        return self.lineno

    def _elsed_block_range(self, lineno, orelse, last=None):
        """Handle block line number ranges for try/finally, for, if and
        while statements.
        """
        if lineno == self.fromlineno:
            return lineno, lineno
        if not orelse:
            return lineno, last or self.tolineno
        else_start = orelse[0].fromlineno
        if lineno >= else_start:
            # We are inside the else clause.
            return lineno, orelse[-1].tolineno
        # We are in the main body, which ends just before the else clause.
        return lineno, else_start - 1
|
||||
|
||||
|
||||
class FilterStmtsMixin:
    """Mixin providing statement filtering and assignment-type lookup."""

    def _get_filtered_stmts(self, _, node, _stmts, mystmt: Optional["nodes.Statement"]):
        """Used by _filter_stmts to pick statements and signal a break."""
        keep_only_current = self.statement(future=True) is mystmt
        if keep_only_current:
            # The original node's statement is the assignment itself, so keep
            # only the current node (gen exp, list comp).
            return [node], True
        return _stmts, False

    def assign_type(self):
        # A filtering node is its own assignment type.
        return self
|
||||
|
||||
|
||||
class AssignTypeMixin:
    """Mixin for nodes that act as their own assignment type."""

    def assign_type(self):
        return self

    def _get_filtered_stmts(
        self, lookup_node, node, _stmts, mystmt: Optional["nodes.Statement"]
    ):
        """Used by filter_stmts to pick statements and signal a break."""
        if self is mystmt:
            return _stmts, True
        stmt_is_assignment = self.statement(future=True) is mystmt
        if stmt_is_assignment:
            # The original node's statement is the assignment itself, so keep
            # only the current node (gen exp, list comp).
            return [node], True
        return _stmts, False
|
||||
|
||||
|
||||
class ParentAssignTypeMixin(AssignTypeMixin):
    """Mixin delegating the assignment type to the parent node."""

    def assign_type(self):
        # The enclosing node carries the actual assignment semantics.
        parent_node = self.parent
        return parent_node.assign_type()
|
||||
|
||||
|
||||
class ImportFromMixin(FilterStmtsMixin):
    """MixIn shared by the From and Import nodes."""

    def _infer_name(self, frame, name):
        # An import binds the name itself.
        return name

    def do_import_module(self, modname=None):
        """Return the AST for the module named *modname* imported by *self*."""
        # Handle the special case where we are on a package node importing a
        # module using the same name as the package, which may end in an
        # infinite loop on relative imports.
        # XXX: no more needed ?
        mymodule = self.root()
        level = getattr(self, "level", None)  # Import as no level
        if modname is None:
            modname = self.modname
        # XXX we should investigate deeper if we really want to check
        # importing itself: modname and mymodule.name be relative or absolute
        if mymodule.relative_to_absolute_name(modname, level) == mymodule.name:
            # FIXME: we used to raise InferenceError here, but why ?
            return mymodule

        relative_only = level and level >= 1
        return mymodule.import_module(modname, level=level, relative_only=relative_only)

    def real_name(self, asname):
        """Get the original imported name from an 'as' alias."""
        for name, alias in self.names:
            if name == "*":
                # Star imports expose every name unchanged.
                return asname
            if not alias:
                # Without an alias, a dotted import binds its first component.
                name = name.split(".", 1)[0]
                alias = name
            if asname == alias:
                return name
        raise AttributeInferenceError(
            "Could not find original name for {attribute} in {target!r}",
            target=self,
            attribute=asname,
        )
|
||||
|
||||
|
||||
class MultiLineBlockMixin:
    """Mixin for nodes with multi-line blocks, e.g. For and FunctionDef.

    Note that this does not apply to every node with a `body` field.
    For instance, an If node has a multi-line body, but the body of an
    IfExpr is not multi-line, and hence cannot contain Return nodes,
    Assign nodes, etc.
    """

    @decorators.cachedproperty
    def _multi_line_blocks(self):
        # Collect every multi-line block field declared by the concrete node.
        blocks = []
        for field_name in self._multi_line_block_fields:
            blocks.append(getattr(self, field_name))
        return tuple(blocks)

    def _get_return_nodes_skip_functions(self):
        # Recurse into children, but never descend into nested functions.
        for block in self._multi_line_blocks:
            for child in block:
                if not child.is_function:
                    yield from child._get_return_nodes_skip_functions()

    def _get_yield_nodes_skip_lambdas(self):
        # Recurse into children, but never descend into nested lambdas.
        for block in self._multi_line_blocks:
            for child in block:
                if not child.is_lambda:
                    yield from child._get_yield_nodes_skip_lambdas()

    @decorators.cached
    def _get_assign_nodes(self):
        # Flatten the assignment nodes gathered from every child.
        collected = []
        for block in self._multi_line_blocks:
            for child in block:
                collected.extend(child._get_assign_nodes())
        return collected
|
||||
|
||||
|
||||
class NoChildrenMixin:
|
||||
"""Mixin for nodes with no children, e.g. Pass."""
|
||||
|
||||
def get_children(self):
|
||||
yield from ()
|
|
@ -1,680 +0,0 @@
|
|||
# Copyright (c) 2014-2018, 2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Google, Inc.
|
||||
# Copyright (c) 2014 Denis Laxalde <denis.laxalde@logilab.fr>
|
||||
# Copyright (c) 2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2014 Eevee (Alex Munroe) <amunroe@yelp.com>
|
||||
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
|
||||
# Copyright (c) 2015 Radosław Ganczarek <radoslaw@ganczarek.in>
|
||||
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
|
||||
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
|
||||
# Copyright (c) 2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2018 Mario Corchero <mcorcherojim@bloomberg.net>
|
||||
# Copyright (c) 2018 Mario Corchero <mariocj89@gmail.com>
|
||||
# Copyright (c) 2018 Anthony Sottile <asottile@umich.edu>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2019 markmcclain <markmcclain@users.noreply.github.com>
|
||||
# Copyright (c) 2019 BasPH <BasPH@users.noreply.github.com>
|
||||
# Copyright (c) 2020-2021 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2020 Peter Kolbus <peter.kolbus@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Daniël van Noord <13665637+DanielNoord@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Keichi Takahashi <hello@keichi.dev>
|
||||
# Copyright (c) 2021 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
# Copyright (c) 2021 DudeNr33 <3929834+DudeNr33@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""Python modules manipulation utility functions.
|
||||
|
||||
:type PY_SOURCE_EXTS: tuple(str)
|
||||
:var PY_SOURCE_EXTS: list of possible python source file extension
|
||||
|
||||
:type STD_LIB_DIRS: set of str
|
||||
:var STD_LIB_DIRS: directories where standard modules are located
|
||||
|
||||
:type BUILTIN_MODULES: dict
|
||||
:var BUILTIN_MODULES: dictionary with builtin module names has key
|
||||
"""
|
||||
|
||||
# We disable the import-error so pylint can work without distutils installed.
|
||||
# pylint: disable=no-name-in-module,useless-suppression
|
||||
|
||||
import importlib
|
||||
import importlib.machinery
|
||||
import importlib.util
|
||||
import itertools
|
||||
import os
|
||||
import platform
|
||||
import sys
|
||||
import types
|
||||
from distutils.errors import DistutilsPlatformError # pylint: disable=import-error
|
||||
from distutils.sysconfig import get_python_lib # pylint: disable=import-error
|
||||
from typing import Dict, Set
|
||||
|
||||
from astroid.interpreter._import import spec, util
|
||||
|
||||
# distutils is replaced by virtualenv with a module that does
|
||||
# weird path manipulations in order to get to the
|
||||
# real distutils module.
|
||||
|
||||
|
||||
# Platform-specific extensions recognized as Python source / compiled files.
if sys.platform.startswith("win"):
    PY_SOURCE_EXTS = ("py", "pyw")
    PY_COMPILED_EXTS = ("dll", "pyd")
else:
    PY_SOURCE_EXTS = ("py",)
    PY_COMPILED_EXTS = ("so",)


try:
    # The explicit sys.prefix is to work around a patch in virtualenv that
    # replaces the 'real' sys.prefix (i.e. the location of the binary)
    # with the prefix from which the virtualenv was created. This throws
    # off the detection logic for standard library modules, thus the
    # workaround.
    STD_LIB_DIRS = {
        get_python_lib(standard_lib=True, prefix=sys.prefix),
        # Take care of installations where exec_prefix != prefix.
        get_python_lib(standard_lib=True, prefix=sys.exec_prefix),
        get_python_lib(standard_lib=True),
    }
# get_python_lib(standard_lib=1) is not available on pypy, set STD_LIB_DIR to
# non-valid path, see https://bugs.pypy.org/issue1164
except DistutilsPlatformError:
    STD_LIB_DIRS = set()

if os.name == "nt":
    STD_LIB_DIRS.add(os.path.join(sys.prefix, "dlls"))
    try:
        # real_prefix is defined when running inside virtual environments,
        # created with the **virtualenv** library.
        # Deprecated in virtualenv==16.7.9
        # See: https://github.com/pypa/virtualenv/issues/1622
        STD_LIB_DIRS.add(os.path.join(sys.real_prefix, "dlls"))  # type: ignore[attr-defined]
    except AttributeError:
        # sys.base_exec_prefix is always defined, but in a virtual environment
        # created with the stdlib **venv** module, it points to the original
        # installation, if the virtual env is activated.
        try:
            STD_LIB_DIRS.add(os.path.join(sys.base_exec_prefix, "dlls"))
        except AttributeError:
            pass

if platform.python_implementation() == "PyPy":
    # The get_python_lib(standard_lib=True) function does not give valid
    # result with pypy in a virtualenv.
    # In a virtual environment, with CPython implementation the call to this function returns a path toward
    # the binary (its libraries) which has been used to create the virtual environment.
    # Not with pypy implementation.
    # The only way to retrieve such information is to use the sys.base_prefix hint.
    # It's worth noticing that under CPython implementation the return values of
    # get_python_lib(standard_lib=True) and get_python_lib(standard_lib=True, prefix=sys.base_prefix)
    # are the same.
    # In the lines above, we could have replaced the call to get_python_lib(standard=True)
    # with the one using prefix=sys.base_prefix but we prefer modifying only what deals with pypy.
    STD_LIB_DIRS.add(get_python_lib(standard_lib=True, prefix=sys.base_prefix))
    _root = os.path.join(sys.prefix, "lib_pypy")
    STD_LIB_DIRS.add(_root)
    try:
        # real_prefix is defined when running inside virtualenv.
        STD_LIB_DIRS.add(os.path.join(sys.base_prefix, "lib_pypy"))
    except AttributeError:
        pass
    del _root
if os.name == "posix":
    # Need the real prefix if we're in a virtualenv, otherwise
    # the usual one will do.
    # Deprecated in virtualenv==16.7.9
    # See: https://github.com/pypa/virtualenv/issues/1622
    try:
        prefix = sys.real_prefix  # type: ignore[attr-defined]
    except AttributeError:
        prefix = sys.prefix

    def _posix_path(path):
        # Build "<prefix>/<path>/pythonX.Y" for the running interpreter.
        base_python = "python%d.%d" % sys.version_info[:2]
        return os.path.join(prefix, path, base_python)

    STD_LIB_DIRS.add(_posix_path("lib"))
    if sys.maxsize > 2 ** 32:
        # This tries to fix a problem with /usr/lib64 builds,
        # where systems are running both 32-bit and 64-bit code
        # on the same machine, which reflects into the places where
        # standard library could be found. More details can be found
        # here http://bugs.python.org/issue1294959.
        # An easy reproducing case would be
        # https://github.com/PyCQA/pylint/issues/712#issuecomment-163178753
        STD_LIB_DIRS.add(_posix_path("lib64"))

# Site-packages style directories for non-stdlib (extension) packages.
EXT_LIB_DIRS = {get_python_lib(), get_python_lib(True)}
IS_JYTHON = platform.python_implementation() == "Jython"
BUILTIN_MODULES = dict.fromkeys(sys.builtin_module_names, True)
|
||||
|
||||
|
||||
class NoSourceFile(Exception):
    """Raised when no Python source file can be found for a
    precompiled (bytecode) file.
    """
|
||||
|
||||
|
||||
def _normalize_path(path: str) -> str:
|
||||
"""Resolve symlinks in path and convert to absolute path.
|
||||
|
||||
Note that environment variables and ~ in the path need to be expanded in
|
||||
advance.
|
||||
|
||||
This can be cached by using _cache_normalize_path.
|
||||
"""
|
||||
return os.path.normcase(os.path.realpath(path))
|
||||
|
||||
|
||||
def _path_from_filename(filename, is_jython=IS_JYTHON):
    """Map a Jython ``$py.class`` filename back to its ``.py`` source name.

    On any other implementation the filename is returned unchanged.
    """
    if is_jython:
        head, marker, _ = filename.partition("$py.class")
        if marker:
            return head + ".py"
    return filename
|
||||
|
||||
|
||||
def _handle_blacklist(blacklist, dirnames, filenames):
|
||||
"""remove files/directories in the black list
|
||||
|
||||
dirnames/filenames are usually from os.walk
|
||||
"""
|
||||
for norecurs in blacklist:
|
||||
if norecurs in dirnames:
|
||||
dirnames.remove(norecurs)
|
||||
elif norecurs in filenames:
|
||||
filenames.remove(norecurs)
|
||||
|
||||
|
||||
# Cache mapping a raw path to its normalized (realpath + normcase) form;
# populated lazily by _cache_normalize_path.
_NORM_PATH_CACHE: Dict[str, str] = {}
|
||||
|
||||
|
||||
def _cache_normalize_path(path: str) -> str:
    """Normalize path with caching.

    _module_file calls abspath on every path in sys.path every time it's
    called; on a larger codebase this easily adds up to half a second just
    assembling path components.  This cache alleviates that.
    """
    if not path:
        # never cache the result for '' (it would pin the cwd at call time)
        return _normalize_path(path)
    cached = _NORM_PATH_CACHE.get(path)
    if cached is None:
        cached = _NORM_PATH_CACHE[path] = _normalize_path(path)
    return cached
|
||||
|
||||
|
||||
def load_module_from_name(dotted_name: str) -> types.ModuleType:
    """Load a Python module from its name.

    :type dotted_name: str
    :param dotted_name: python name of a module or package

    :raise ImportError: if the module or package is not found

    :rtype: module
    :return: the loaded module (the cached sys.modules entry when present)
    """
    _missing = object()
    module = sys.modules.get(dotted_name, _missing)
    if module is _missing:
        return importlib.import_module(dotted_name)
    return module
|
||||
|
||||
|
||||
def load_module_from_modpath(parts):
    """Load a python module from its split name.

    :type parts: list(str) or tuple(str)
    :param parts: python name of a module or package split on '.'

    :raise ImportError: if the module or package is not found

    :rtype: module
    :return: the loaded module
    """
    dotted_name = ".".join(parts)
    return load_module_from_name(dotted_name)
|
||||
|
||||
|
||||
def load_module_from_file(filepath: str):
    """Load a Python module from its path.

    :type filepath: str
    :param filepath: path to the python module or package

    :raise ImportError: if the module or package is not found

    :rtype: module
    :return: the loaded module
    """
    parts = modpath_from_file(filepath)
    return load_module_from_modpath(parts)
|
||||
|
||||
|
||||
def check_modpath_has_init(path, mod_path):
    """Return True when every level of *mod_path*, rooted at *path*,
    has an ``__init__`` file or is a known namespace package.
    """
    seen_parts = []
    current = path
    for part in mod_path:
        seen_parts.append(part)
        current = os.path.join(current, part)
        if not _has_init(current):
            # no __init__: only acceptable for namespace packages
            if not util.is_namespace(".".join(seen_parts)):
                return False
    return True
|
||||
|
||||
|
||||
def _get_relative_base_path(filename, path_to_check):
|
||||
"""Extracts the relative mod path of the file to import from
|
||||
|
||||
Check if a file is within the passed in path and if so, returns the
|
||||
relative mod path from the one passed in.
|
||||
|
||||
If the filename is no in path_to_check, returns None
|
||||
|
||||
Note this function will look for both abs and realpath of the file,
|
||||
this allows to find the relative base path even if the file is a
|
||||
symlink of a file in the passed in path
|
||||
|
||||
Examples:
|
||||
_get_relative_base_path("/a/b/c/d.py", "/a/b") -> ["c","d"]
|
||||
_get_relative_base_path("/a/b/c/d.py", "/dev") -> None
|
||||
"""
|
||||
importable_path = None
|
||||
path_to_check = os.path.normcase(path_to_check)
|
||||
abs_filename = os.path.abspath(filename)
|
||||
if os.path.normcase(abs_filename).startswith(path_to_check):
|
||||
importable_path = abs_filename
|
||||
|
||||
real_filename = os.path.realpath(filename)
|
||||
if os.path.normcase(real_filename).startswith(path_to_check):
|
||||
importable_path = real_filename
|
||||
|
||||
# if "var" in path_to_check:
|
||||
# breakpoint()
|
||||
|
||||
if importable_path:
|
||||
base_path = os.path.splitext(importable_path)[0]
|
||||
relative_base_path = base_path[len(path_to_check) :]
|
||||
return [pkg for pkg in relative_base_path.split(os.sep) if pkg]
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def modpath_from_file_with_callback(filename, path=None, is_package_cb=None):
    """Return the split module name for *filename*, validating candidates
    with *is_package_cb*.

    :param filename: file path to resolve to a module path
    :param path: optional extra search paths, tried in addition to sys.path
    :param is_package_cb: callable(root, modpath_parts) -> bool deciding
        whether a candidate location is an importable package
    :raise ImportError: if no search path contains the file
    """
    filename = os.path.expanduser(_path_from_filename(filename))
    paths_to_check = sys.path.copy()
    if path:
        paths_to_check += path
    # try raw paths first, then their normalized forms
    for pathname in itertools.chain(
        paths_to_check, map(_cache_normalize_path, paths_to_check)
    ):
        if not pathname:
            continue
        modpath = _get_relative_base_path(filename, pathname)
        if not modpath:
            continue
        if is_package_cb(pathname, modpath[:-1]):
            return modpath

    # Bug fix: report every path actually searched; the previous message
    # joined sys.path only, silently omitting caller-supplied `path` entries.
    raise ImportError(
        "Unable to find module for {} in {}".format(
            filename, ", \n".join(paths_to_check)
        )
    )
|
||||
|
||||
|
||||
def modpath_from_file(filename, path=None):
    """Get the corresponding split module's name from a filename.

    This function will return the name of a module or package split on `.`,
    accepting only candidates validated by check_modpath_has_init.

    :type filename: str
    :param filename: file's path for which we want the module's name

    :type Optional[List[str]] path:
        Optional list of path where the module or package should be
        searched (use sys.path if nothing or None is given)

    :raise ImportError:
        if the corresponding module's name has not been found

    :rtype: list(str)
    :return: the corresponding split module's name
    """
    is_package = check_modpath_has_init
    return modpath_from_file_with_callback(filename, path, is_package)
|
||||
|
||||
|
||||
def file_from_modpath(modpath, path=None, context_file=None):
    """Return only the file location from file_info_from_modpath."""
    info = file_info_from_modpath(modpath, path, context_file)
    return info.location
|
||||
|
||||
|
||||
def file_info_from_modpath(modpath, path=None, context_file=None):
    """given a mod path (i.e. split module / package name), return the
    corresponding file, giving priority to source file over precompiled
    file if it exists

    :type modpath: list or tuple
    :param modpath:
      split module's name (i.e name of a module or package split
      on '.')
      (this means explicit relative imports that start with dots have
      empty strings in this list!)

    :type path: list or None
    :param path:
      optional list of path where the module or package should be
      searched (use sys.path if nothing or None is given)

    :type context_file: str or None
    :param context_file:
      context file to consider, necessary if the identifier has been
      introduced using a relative import unresolvable in the actual
      context (i.e. modutils)

    :raise ImportError: if there is no such module in the directory

    :rtype: (str or None, import type)
    :return:
      the path to the module's file or None if it's an integrated
      builtin module such as 'sys'
    """
    if context_file is not None:
        # relative imports are resolved against the context file's directory
        context = os.path.dirname(context_file)
    else:
        context = context_file
    if modpath[0] == "xml":
        # handle _xmlplus
        try:
            return _spec_from_modpath(["_xmlplus"] + modpath[1:], path, context)
        except ImportError:
            return _spec_from_modpath(modpath, path, context)
    elif modpath == ["os", "path"]:
        # FIXME: currently ignoring search_path...
        return spec.ModuleSpec(
            name="os.path",
            location=os.path.__file__,
            module_type=spec.ModuleType.PY_SOURCE,
        )
    return _spec_from_modpath(modpath, path, context)
|
||||
|
||||
|
||||
def get_module_part(dotted_name, context_file=None):
    """given a dotted name return the module part of the name :

    >>> get_module_part('astroid.as_string.dump')
    'astroid.as_string'

    :type dotted_name: str
    :param dotted_name: full name of the identifier we are interested in

    :type context_file: str or None
    :param context_file:
      context file to consider, necessary if the identifier has been
      introduced using a relative import unresolvable in the actual
      context (i.e. modutils)


    :raise ImportError: if there is no such module in the directory

    :rtype: str or None
    :return:
      the module part of the name or None if we have not been able at
      all to import the given name

    XXX: deprecated, since it doesn't handle package precedence over module
    (see #10066)
    """
    # os.path trick
    if dotted_name.startswith("os.path"):
        return "os.path"
    parts = dotted_name.split(".")
    if context_file is not None:
        # first check for builtin module which won't be considered latter
        # in that case (path != None)
        if parts[0] in BUILTIN_MODULES:
            if len(parts) > 2:
                raise ImportError(dotted_name)
            return parts[0]
    # don't use += or insert, we want a new list to be created !
    path = None
    starti = 0
    if parts[0] == "":
        assert (
            context_file is not None
        ), "explicit relative import, but no context_file?"
        path = []  # prevent resolving the import non-relatively
        starti = 1
    # every additional leading dot climbs one directory out of the context
    while parts[starti] == "":  # for all further dots: change context
        starti += 1
        context_file = os.path.dirname(context_file)
    for i in range(starti, len(parts)):
        try:
            file_from_modpath(
                parts[starti : i + 1], path=path, context_file=context_file
            )
        except ImportError:
            # tolerate failure on the final one or two segments: they may
            # be attributes of a module rather than modules themselves
            if i < max(1, len(parts) - 2):
                raise
            return ".".join(parts[:i])
    return dotted_name
|
||||
|
||||
|
||||
def get_module_files(src_directory, blacklist, list_all=False):
    """given a package directory return a list of all available python
    module's files in the package and its subpackages

    :type src_directory: str
    :param src_directory:
      path of the directory corresponding to the package

    :type blacklist: list or tuple
    :param blacklist: iterable
      list of files or directories to ignore.

    :type list_all: bool
    :param list_all:
      get files from all paths, including ones without __init__.py

    :rtype: list
    :return:
      the list of all available python module's files in the package and
      its subpackages
    """
    collected = []
    for root, subdirs, entries in os.walk(src_directory):
        if root in blacklist:
            continue
        _handle_blacklist(blacklist, subdirs, entries)
        # a directory without __init__.py is not a package: stop descending
        if not list_all and "__init__.py" not in entries:
            subdirs[:] = ()
            continue
        collected.extend(
            os.path.join(root, entry) for entry in entries if _is_python_file(entry)
        )
    return collected
|
||||
|
||||
|
||||
def get_source_file(filename, include_no_ext=False):
    """given a python module's file name return the matching source file
    name (the filename will be returned identically if it's already an
    absolute path to a python source file...)

    :type filename: str
    :param filename: python module's file name

    :raise NoSourceFile: if no source file exists on the file system

    :rtype: str
    :return: the absolute path of the source file if it exists
    """
    filename = os.path.abspath(_path_from_filename(filename))
    base, orig_ext = os.path.splitext(filename)
    for ext in PY_SOURCE_EXTS:
        candidate = f"{base}.{ext}"
        if os.path.exists(candidate):
            return candidate
    # extension-less files may themselves be sources when requested
    if include_no_ext and not orig_ext and os.path.exists(base):
        return base
    raise NoSourceFile(filename)
|
||||
|
||||
|
||||
def is_python_source(filename):
    """
    rtype: bool
    return: True if the filename is a python source file
    """
    extension = os.path.splitext(filename)[1]
    return extension[1:] in PY_SOURCE_EXTS
|
||||
|
||||
|
||||
def is_standard_module(modname, std_path=None):
    """try to guess if a module is a standard python module (by default,
    see `std_path` parameter's description)

    :type modname: str
    :param modname: name of the module we are interested in

    :type std_path: list(str) or tuple(str)
    :param std_path: list of path considered has standard


    :rtype: bool
    :return:
      true if the module:
      - is located on the path listed in one of the directory in `std_path`
      - is a built-in module
    """
    # only the top-level package determines standard-ness
    modname = modname.split(".")[0]
    try:
        filename = file_from_modpath([modname])
    except ImportError:
        # import failed, i'm probably not so wrong by supposing it's
        # not standard...
        return False
    # modules which are not living in a file are considered standard
    # (sys and __builtin__ for instance)
    if filename is None:
        # we assume there are no namespaces in stdlib
        return not util.is_namespace(modname)
    filename = _normalize_path(filename)
    # anything under an extension/site-packages dir is not standard,
    # even if that dir happens to live inside a std-lib prefix
    for path in EXT_LIB_DIRS:
        if filename.startswith(_cache_normalize_path(path)):
            return False
    if std_path is None:
        std_path = STD_LIB_DIRS

    return any(filename.startswith(_cache_normalize_path(path)) for path in std_path)
|
||||
|
||||
|
||||
def is_relative(modname, from_file):
    """return true if the given module name is relative to the given
    file name

    :type modname: str
    :param modname: name of the module we are interested in

    :type from_file: str
    :param from_file:
      path of the module from which modname has been imported

    :rtype: bool
    :return:
      true if the module has been imported relatively to `from_file`
    """
    directory = from_file if os.path.isdir(from_file) else os.path.dirname(from_file)
    # anything importable through sys.path is not "relative"
    if directory in sys.path:
        return False
    top_level = modname.split(".", maxsplit=1)[0]
    found = importlib.machinery.PathFinder.find_spec(top_level, [directory])
    return bool(found)
|
||||
|
||||
|
||||
# internal only functions #####################################################
|
||||
|
||||
|
||||
def _spec_from_modpath(modpath, path=None, context=None):
    """given a mod path (i.e. split module / package name), return the
    corresponding spec

    this function is used internally, see `file_from_modpath`'s
    documentation for more information
    """
    assert modpath
    location = None
    if context is not None:
        # try a context-relative lookup first, falling back to the regular
        # search path when it fails
        try:
            found_spec = spec.find_spec(modpath, [context])
            location = found_spec.location
        except ImportError:
            found_spec = spec.find_spec(modpath, path)
            location = found_spec.location
    else:
        found_spec = spec.find_spec(modpath, path)
    if found_spec.type == spec.ModuleType.PY_COMPILED:
        try:
            # prefer the .py source over the compiled artefact when present
            location = get_source_file(found_spec.location)
            return found_spec._replace(
                location=location, type=spec.ModuleType.PY_SOURCE
            )
        except NoSourceFile:
            return found_spec._replace(location=location)
    elif found_spec.type == spec.ModuleType.C_BUILTIN:
        # integrated builtin module
        return found_spec._replace(location=None)
    elif found_spec.type == spec.ModuleType.PKG_DIRECTORY:
        # a package: point the location at its __init__ file
        location = _has_init(found_spec.location)
        return found_spec._replace(location=location, type=spec.ModuleType.PY_SOURCE)
    return found_spec
|
||||
|
||||
|
||||
def _is_python_file(filename):
|
||||
"""return true if the given filename should be considered as a python file
|
||||
|
||||
.pyc and .pyo are ignored
|
||||
"""
|
||||
return filename.endswith((".py", ".so", ".pyd", ".pyw"))
|
||||
|
||||
|
||||
def _has_init(directory):
    """if the given directory has a valid __init__ file, return its path,
    else return None
    """
    stem = os.path.join(directory, "__init__")
    for ext in PY_SOURCE_EXTS + ("pyc", "pyo"):
        candidate = f"{stem}.{ext}"
        if os.path.exists(candidate):
            return candidate
    return None
|
||||
|
||||
|
||||
def is_namespace(specobj):
    """Return True if *specobj* describes a namespace package."""
    return specobj.type == spec.ModuleType.PY_NAMESPACE
|
||||
|
||||
|
||||
def is_directory(specobj):
    """Return True if *specobj* describes a package directory."""
    return specobj.type == spec.ModuleType.PKG_DIRECTORY
|
||||
|
||||
|
||||
def is_module_name_part_of_extension_package_whitelist(
    module_name: str, package_whitelist: Set[str]
) -> bool:
    """
    Returns True if one part of the module name is in the package whitelist

    >>> is_module_name_part_of_extension_package_whitelist('numpy.core.umath', {'numpy'})
    True
    """
    parts = module_name.split(".")
    for end in range(1, len(parts) + 1):
        if ".".join(parts[:end]) in package_whitelist:
            return True
    return False
|
|
@ -1,93 +0,0 @@
|
|||
# pylint: disable=unused-import
|
||||
|
||||
import warnings
|
||||
|
||||
from astroid.nodes.node_classes import ( # pylint: disable=redefined-builtin (Ellipsis)
|
||||
CONST_CLS,
|
||||
AnnAssign,
|
||||
Arguments,
|
||||
Assert,
|
||||
Assign,
|
||||
AssignAttr,
|
||||
AssignName,
|
||||
AsyncFor,
|
||||
AsyncWith,
|
||||
Attribute,
|
||||
AugAssign,
|
||||
Await,
|
||||
BaseContainer,
|
||||
BinOp,
|
||||
BoolOp,
|
||||
Break,
|
||||
Call,
|
||||
Compare,
|
||||
Comprehension,
|
||||
Const,
|
||||
Continue,
|
||||
Decorators,
|
||||
DelAttr,
|
||||
Delete,
|
||||
DelName,
|
||||
Dict,
|
||||
DictUnpack,
|
||||
Ellipsis,
|
||||
EmptyNode,
|
||||
EvaluatedObject,
|
||||
ExceptHandler,
|
||||
Expr,
|
||||
ExtSlice,
|
||||
For,
|
||||
FormattedValue,
|
||||
Global,
|
||||
If,
|
||||
IfExp,
|
||||
Import,
|
||||
ImportFrom,
|
||||
Index,
|
||||
JoinedStr,
|
||||
Keyword,
|
||||
List,
|
||||
LookupMixIn,
|
||||
Match,
|
||||
MatchAs,
|
||||
MatchCase,
|
||||
MatchClass,
|
||||
MatchMapping,
|
||||
MatchOr,
|
||||
MatchSequence,
|
||||
MatchSingleton,
|
||||
MatchStar,
|
||||
MatchValue,
|
||||
Name,
|
||||
NamedExpr,
|
||||
NodeNG,
|
||||
Nonlocal,
|
||||
Pass,
|
||||
Pattern,
|
||||
Raise,
|
||||
Return,
|
||||
Set,
|
||||
Slice,
|
||||
Starred,
|
||||
Subscript,
|
||||
TryExcept,
|
||||
TryFinally,
|
||||
Tuple,
|
||||
UnaryOp,
|
||||
Unknown,
|
||||
While,
|
||||
With,
|
||||
Yield,
|
||||
YieldFrom,
|
||||
are_exclusive,
|
||||
const_factory,
|
||||
unpack_infer,
|
||||
)
|
||||
|
||||
# We cannot create a __all__ here because it would create a circular import
# Please remove astroid/scoped_nodes.py|astroid/node_classes.py in autoflake
# exclude when removing this file.
# Emitted at import time so users of the legacy module name see the notice.
warnings.warn(
    "The 'astroid.node_classes' module is deprecated and will be replaced by 'astroid.nodes' in astroid 3.0.0",
    DeprecationWarning,
)
|
|
@ -1,309 +0,0 @@
|
|||
# Copyright (c) 2006-2011, 2013 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2010 Daniel Harding <dharding@gmail.com>
|
||||
# Copyright (c) 2014-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2014 Google, Inc.
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2016 Jared Garst <jgarst@users.noreply.github.com>
|
||||
# Copyright (c) 2017 Ashley Whetter <ashley@awhetter.co.uk>
|
||||
# Copyright (c) 2017 rr- <rr-@sakuya.pl>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Daniël van Noord <13665637+DanielNoord@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""Every available node class.
|
||||
|
||||
.. seealso::
|
||||
:doc:`ast documentation <green_tree_snakes:nodes>`
|
||||
|
||||
All nodes inherit from :class:`~astroid.nodes.node_classes.NodeNG`.
|
||||
"""
|
||||
|
||||
# Nodes not present in the builtin ast module: DictUnpack, Unknown, and EvaluatedObject.
|
||||
|
||||
from astroid.nodes.node_classes import ( # pylint: disable=redefined-builtin (Ellipsis)
|
||||
CONST_CLS,
|
||||
AnnAssign,
|
||||
Arguments,
|
||||
Assert,
|
||||
Assign,
|
||||
AssignAttr,
|
||||
AssignName,
|
||||
AsyncFor,
|
||||
AsyncWith,
|
||||
Attribute,
|
||||
AugAssign,
|
||||
Await,
|
||||
BaseContainer,
|
||||
BinOp,
|
||||
BoolOp,
|
||||
Break,
|
||||
Call,
|
||||
Compare,
|
||||
Comprehension,
|
||||
Const,
|
||||
Continue,
|
||||
Decorators,
|
||||
DelAttr,
|
||||
Delete,
|
||||
DelName,
|
||||
Dict,
|
||||
DictUnpack,
|
||||
Ellipsis,
|
||||
EmptyNode,
|
||||
EvaluatedObject,
|
||||
ExceptHandler,
|
||||
Expr,
|
||||
ExtSlice,
|
||||
For,
|
||||
FormattedValue,
|
||||
Global,
|
||||
If,
|
||||
IfExp,
|
||||
Import,
|
||||
ImportFrom,
|
||||
Index,
|
||||
JoinedStr,
|
||||
Keyword,
|
||||
List,
|
||||
Match,
|
||||
MatchAs,
|
||||
MatchCase,
|
||||
MatchClass,
|
||||
MatchMapping,
|
||||
MatchOr,
|
||||
MatchSequence,
|
||||
MatchSingleton,
|
||||
MatchStar,
|
||||
MatchValue,
|
||||
Name,
|
||||
NamedExpr,
|
||||
NodeNG,
|
||||
Nonlocal,
|
||||
Pass,
|
||||
Pattern,
|
||||
Raise,
|
||||
Return,
|
||||
Set,
|
||||
Slice,
|
||||
Starred,
|
||||
Statement,
|
||||
Subscript,
|
||||
TryExcept,
|
||||
TryFinally,
|
||||
Tuple,
|
||||
UnaryOp,
|
||||
Unknown,
|
||||
While,
|
||||
With,
|
||||
Yield,
|
||||
YieldFrom,
|
||||
are_exclusive,
|
||||
const_factory,
|
||||
unpack_infer,
|
||||
)
|
||||
from astroid.nodes.scoped_nodes import (
|
||||
AsyncFunctionDef,
|
||||
ClassDef,
|
||||
ComprehensionScope,
|
||||
DictComp,
|
||||
FunctionDef,
|
||||
GeneratorExp,
|
||||
Lambda,
|
||||
ListComp,
|
||||
LocalsDictNodeNG,
|
||||
Module,
|
||||
SetComp,
|
||||
builtin_lookup,
|
||||
function_to_method,
|
||||
get_wrapping_class,
|
||||
)
|
||||
|
||||
_BaseContainer = BaseContainer  # TODO Remove for astroid 3.0

# Tuple of every node class (plus the const_factory helper) re-exported by
# astroid.nodes, for consumers that iterate over all node types.
ALL_NODE_CLASSES = (
    _BaseContainer,
    BaseContainer,
    AnnAssign,
    Arguments,
    Assert,
    Assign,
    AssignAttr,
    AssignName,
    AsyncFor,
    AsyncFunctionDef,
    AsyncWith,
    Attribute,
    AugAssign,
    Await,
    BinOp,
    BoolOp,
    Break,
    Call,
    ClassDef,
    Compare,
    Comprehension,
    ComprehensionScope,
    Const,
    const_factory,
    Continue,
    Decorators,
    DelAttr,
    Delete,
    DelName,
    Dict,
    DictComp,
    DictUnpack,
    Ellipsis,
    EmptyNode,
    EvaluatedObject,
    ExceptHandler,
    Expr,
    ExtSlice,
    For,
    FormattedValue,
    FunctionDef,
    GeneratorExp,
    Global,
    If,
    IfExp,
    Import,
    ImportFrom,
    Index,
    JoinedStr,
    Keyword,
    Lambda,
    List,
    ListComp,
    LocalsDictNodeNG,
    Match,
    MatchAs,
    MatchCase,
    MatchClass,
    MatchMapping,
    MatchOr,
    MatchSequence,
    MatchSingleton,
    MatchStar,
    MatchValue,
    Module,
    Name,
    NamedExpr,
    NodeNG,
    Nonlocal,
    Pass,
    Pattern,
    Raise,
    Return,
    Set,
    SetComp,
    Slice,
    Starred,
    Subscript,
    TryExcept,
    TryFinally,
    Tuple,
    UnaryOp,
    Unknown,
    While,
    With,
    Yield,
    YieldFrom,
)

# Explicit public API of the astroid.nodes package.
__all__ = (
    "AnnAssign",
    "are_exclusive",
    "Arguments",
    "Assert",
    "Assign",
    "AssignAttr",
    "AssignName",
    "AsyncFor",
    "AsyncFunctionDef",
    "AsyncWith",
    "Attribute",
    "AugAssign",
    "Await",
    "BinOp",
    "BoolOp",
    "Break",
    "builtin_lookup",
    "Call",
    "ClassDef",
    "CONST_CLS",
    "Compare",
    "Comprehension",
    "ComprehensionScope",
    "Const",
    "const_factory",
    "Continue",
    "Decorators",
    "DelAttr",
    "Delete",
    "DelName",
    "Dict",
    "DictComp",
    "DictUnpack",
    "Ellipsis",
    "EmptyNode",
    "EvaluatedObject",
    "ExceptHandler",
    "Expr",
    "ExtSlice",
    "For",
    "FormattedValue",
    "FunctionDef",
    "function_to_method",
    "GeneratorExp",
    "get_wrapping_class",
    "Global",
    "If",
    "IfExp",
    "Import",
    "ImportFrom",
    "Index",
    "JoinedStr",
    "Keyword",
    "Lambda",
    "List",
    "ListComp",
    "LocalsDictNodeNG",
    "Match",
    "MatchAs",
    "MatchCase",
    "MatchClass",
    "MatchMapping",
    "MatchOr",
    "MatchSequence",
    "MatchSingleton",
    "MatchStar",
    "MatchValue",
    "Module",
    "Name",
    "NamedExpr",
    "NodeNG",
    "Nonlocal",
    "Pass",
    "Raise",
    "Return",
    "Set",
    "SetComp",
    "Slice",
    "Starred",
    "Statement",
    "Subscript",
    "TryExcept",
    "TryFinally",
    "Tuple",
    "UnaryOp",
    "Unknown",
    "unpack_infer",
    "While",
    "With",
    "Yield",
    "YieldFrom",
)
|
|
@ -1,663 +0,0 @@
|
|||
# Copyright (c) 2009-2011, 2013-2014 LOGILAB S.A. (Paris, FRANCE) <contact@logilab.fr>
|
||||
# Copyright (c) 2010 Daniel Harding <dharding@gmail.com>
|
||||
# Copyright (c) 2013-2016, 2018-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2013-2014 Google, Inc.
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2016 Jared Garst <jgarst@users.noreply.github.com>
|
||||
# Copyright (c) 2016 Jakub Wilk <jwilk@jwilk.net>
|
||||
# Copyright (c) 2017, 2019 Łukasz Rogalski <rogalski.91@gmail.com>
|
||||
# Copyright (c) 2017 rr- <rr-@sakuya.pl>
|
||||
# Copyright (c) 2018 Serhiy Storchaka <storchaka@gmail.com>
|
||||
# Copyright (c) 2018 Ville Skyttä <ville.skytta@iki.fi>
|
||||
# Copyright (c) 2018 brendanator <brendan.maginnis@gmail.com>
|
||||
# Copyright (c) 2018 Nick Drozd <nicholasdrozd@gmail.com>
|
||||
# Copyright (c) 2019 Alex Hall <alex.mojaki@gmail.com>
|
||||
# Copyright (c) 2019 Hugo van Kemenade <hugovk@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
# Copyright (c) 2021 Daniël van Noord <13665637+DanielNoord@users.noreply.github.com>
|
||||
# Copyright (c) 2021 pre-commit-ci[bot] <bot@noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
"""This module renders Astroid nodes as string"""
|
||||
from typing import TYPE_CHECKING, List
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from astroid.nodes.node_classes import (
|
||||
Match,
|
||||
MatchAs,
|
||||
MatchCase,
|
||||
MatchClass,
|
||||
MatchMapping,
|
||||
MatchOr,
|
||||
MatchSequence,
|
||||
MatchSingleton,
|
||||
MatchStar,
|
||||
MatchValue,
|
||||
Unknown,
|
||||
)
|
||||
|
||||
# pylint: disable=unused-argument
|
||||
|
||||
# Sentinel protecting newlines inside docstrings from the generic indentation
# pass; translated back to "\n" in AsStringVisitor.__call__.
DOC_NEWLINE = "\0"
|
||||
|
||||
|
||||
# Visitor pattern require argument all the time and is not better with staticmethod
|
||||
# noinspection PyUnusedLocal,PyMethodMayBeStatic
|
||||
class AsStringVisitor:
|
||||
"""Visitor to render an Astroid node as a valid python code string"""
|
||||
|
||||
def __init__(self, indent=" "):
|
||||
self.indent = indent
|
||||
|
||||
def __call__(self, node):
|
||||
"""Makes this visitor behave as a simple function"""
|
||||
return node.accept(self).replace(DOC_NEWLINE, "\n")
|
||||
|
||||
def _docs_dedent(self, doc):
|
||||
"""Stop newlines in docs being indented by self._stmt_list"""
|
||||
return '\n{}"""{}"""'.format(self.indent, doc.replace("\n", DOC_NEWLINE))
|
||||
|
||||
def _stmt_list(self, stmts, indent=True):
|
||||
"""return a list of nodes to string"""
|
||||
stmts = "\n".join(nstr for nstr in [n.accept(self) for n in stmts] if nstr)
|
||||
if indent:
|
||||
return self.indent + stmts.replace("\n", "\n" + self.indent)
|
||||
|
||||
return stmts
|
||||
|
||||
def _precedence_parens(self, node, child, is_left=True):
|
||||
"""Wrap child in parens only if required to keep same semantics"""
|
||||
if self._should_wrap(node, child, is_left):
|
||||
return f"({child.accept(self)})"
|
||||
|
||||
return child.accept(self)
|
||||
|
||||
def _should_wrap(self, node, child, is_left):
|
||||
"""Wrap child if:
|
||||
- it has lower precedence
|
||||
- same precedence with position opposite to associativity direction
|
||||
"""
|
||||
node_precedence = node.op_precedence()
|
||||
child_precedence = child.op_precedence()
|
||||
|
||||
if node_precedence > child_precedence:
|
||||
# 3 * (4 + 5)
|
||||
return True
|
||||
|
||||
if (
|
||||
node_precedence == child_precedence
|
||||
and is_left != node.op_left_associative()
|
||||
):
|
||||
# 3 - (4 - 5)
|
||||
# (2**3)**4
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
# visit_<node> methods ###########################################
|
||||
|
||||
def visit_await(self, node):
|
||||
return f"await {node.value.accept(self)}"
|
||||
|
||||
def visit_asyncwith(self, node):
|
||||
return f"async {self.visit_with(node)}"
|
||||
|
||||
def visit_asyncfor(self, node):
|
||||
return f"async {self.visit_for(node)}"
|
||||
|
||||
def visit_arguments(self, node):
|
||||
"""return an astroid.Function node as string"""
|
||||
return node.format_args()
|
||||
|
||||
def visit_assignattr(self, node):
|
||||
"""return an astroid.AssAttr node as string"""
|
||||
return self.visit_attribute(node)
|
||||
|
||||
def visit_assert(self, node):
|
||||
"""return an astroid.Assert node as string"""
|
||||
if node.fail:
|
||||
return f"assert {node.test.accept(self)}, {node.fail.accept(self)}"
|
||||
return f"assert {node.test.accept(self)}"
|
||||
|
||||
def visit_assignname(self, node):
|
||||
"""return an astroid.AssName node as string"""
|
||||
return node.name
|
||||
|
||||
def visit_assign(self, node):
|
||||
"""return an astroid.Assign node as string"""
|
||||
lhs = " = ".join(n.accept(self) for n in node.targets)
|
||||
return f"{lhs} = {node.value.accept(self)}"
|
||||
|
||||
def visit_augassign(self, node):
|
||||
"""return an astroid.AugAssign node as string"""
|
||||
return f"{node.target.accept(self)} {node.op} {node.value.accept(self)}"
|
||||
|
||||
def visit_annassign(self, node):
|
||||
"""Return an astroid.AugAssign node as string"""
|
||||
|
||||
target = node.target.accept(self)
|
||||
annotation = node.annotation.accept(self)
|
||||
if node.value is None:
|
||||
return f"{target}: {annotation}"
|
||||
return f"{target}: {annotation} = {node.value.accept(self)}"
|
||||
|
||||
def visit_binop(self, node):
|
||||
"""return an astroid.BinOp node as string"""
|
||||
left = self._precedence_parens(node, node.left)
|
||||
right = self._precedence_parens(node, node.right, is_left=False)
|
||||
if node.op == "**":
|
||||
return f"{left}{node.op}{right}"
|
||||
|
||||
return f"{left} {node.op} {right}"
|
||||
|
||||
def visit_boolop(self, node):
    """Return an astroid.BoolOp node as string."""
    # Join operands with the boolean operator, parenthesising as needed.
    separator = f" {node.op} "
    return separator.join(
        self._precedence_parens(node, child) for child in node.values
    )
|
||||
|
||||
def visit_break(self, node):
|
||||
"""return an astroid.Break node as string"""
|
||||
return "break"
|
||||
|
||||
def visit_call(self, node):
    """Return an astroid.Call node as string."""
    func = self._precedence_parens(node, node.func)
    # Positional arguments first, then keyword arguments (if any).
    pieces = [arg.accept(self) for arg in node.args]
    if node.keywords:
        pieces.extend(kwarg.accept(self) for kwarg in node.keywords)
    return f"{func}({', '.join(pieces)})"
|
||||
|
||||
def visit_classdef(self, node):
|
||||
"""return an astroid.ClassDef node as string"""
|
||||
decorate = node.decorators.accept(self) if node.decorators else ""
|
||||
args = [n.accept(self) for n in node.bases]
|
||||
if node._metaclass and not node.has_metaclass_hack():
|
||||
args.append("metaclass=" + node._metaclass.accept(self))
|
||||
args += [n.accept(self) for n in node.keywords]
|
||||
args = f"({', '.join(args)})" if args else ""
|
||||
docs = self._docs_dedent(node.doc) if node.doc else ""
|
||||
return "\n\n{}class {}{}:{}\n{}\n".format(
|
||||
decorate, node.name, args, docs, self._stmt_list(node.body)
|
||||
)
|
||||
|
||||
def visit_compare(self, node):
|
||||
"""return an astroid.Compare node as string"""
|
||||
rhs_str = " ".join(
|
||||
f"{op} {self._precedence_parens(node, expr, is_left=False)}"
|
||||
for op, expr in node.ops
|
||||
)
|
||||
return f"{self._precedence_parens(node, node.left)} {rhs_str}"
|
||||
|
||||
def visit_comprehension(self, node):
|
||||
"""return an astroid.Comprehension node as string"""
|
||||
ifs = "".join(f" if {n.accept(self)}" for n in node.ifs)
|
||||
generated = f"for {node.target.accept(self)} in {node.iter.accept(self)}{ifs}"
|
||||
return f"{'async ' if node.is_async else ''}{generated}"
|
||||
|
||||
def visit_const(self, node):
    """Return an astroid.Const node as string."""
    # Ellipsis must be spelled "..." explicitly; repr covers the rest.
    return "..." if node.value is Ellipsis else repr(node.value)
|
||||
|
||||
def visit_continue(self, node):
|
||||
"""return an astroid.Continue node as string"""
|
||||
return "continue"
|
||||
|
||||
def visit_delete(self, node):  # XXX check if correct
    """Return an astroid.Delete node as string."""
    targets = ", ".join(target.accept(self) for target in node.targets)
    return f"del {targets}"
|
||||
|
||||
def visit_delattr(self, node):
|
||||
"""return an astroid.DelAttr node as string"""
|
||||
return self.visit_attribute(node)
|
||||
|
||||
def visit_delname(self, node):
|
||||
"""return an astroid.DelName node as string"""
|
||||
return node.name
|
||||
|
||||
def visit_decorators(self, node):
    """Return an astroid.Decorators node as string."""
    # One "@decorator" per line, with a trailing newline before the def.
    rendered = [item.accept(self) for item in node.nodes]
    return "@" + "\n@".join(rendered) + "\n"
|
||||
|
||||
def visit_dict(self, node):
|
||||
"""return an astroid.Dict node as string"""
|
||||
return "{%s}" % ", ".join(self._visit_dict(node))
|
||||
|
||||
def _visit_dict(self, node):
    """Yield each entry of a Dict node as ``key: value`` (or ``**mapping``)."""
    for key_node, value_node in node.items:
        key = key_node.accept(self)
        value = value_node.accept(self)
        # A "**" key can only come from a DictUnpack node: splice it in
        # front of the value instead of using the colon form.
        yield f"{key}{value}" if key == "**" else f"{key}: {value}"
|
||||
|
||||
def visit_dictunpack(self, node):
|
||||
return "**"
|
||||
|
||||
def visit_dictcomp(self, node):
|
||||
"""return an astroid.DictComp node as string"""
|
||||
return "{{{}: {} {}}}".format(
|
||||
node.key.accept(self),
|
||||
node.value.accept(self),
|
||||
" ".join(n.accept(self) for n in node.generators),
|
||||
)
|
||||
|
||||
def visit_expr(self, node):
|
||||
"""return an astroid.Discard node as string"""
|
||||
return node.value.accept(self)
|
||||
|
||||
def visit_emptynode(self, node):
|
||||
"""dummy method for visiting an Empty node"""
|
||||
return ""
|
||||
|
||||
def visit_excepthandler(self, node):
    """Return an astroid.ExceptHandler node as string."""
    if node.type:
        if node.name:
            # ``except SomeError as name:``
            excs = f"except {node.type.accept(self)} as {node.name.accept(self)}"
        else:
            # ``except SomeError:``
            excs = f"except {node.type.accept(self)}"
    else:
        # Bare ``except:``
        excs = "except"
    return f"{excs}:\n{self._stmt_list(node.body)}"
|
||||
|
||||
def visit_empty(self, node):
|
||||
"""return an Empty node as string"""
|
||||
return ""
|
||||
|
||||
def visit_for(self, node):
|
||||
"""return an astroid.For node as string"""
|
||||
fors = "for {} in {}:\n{}".format(
|
||||
node.target.accept(self), node.iter.accept(self), self._stmt_list(node.body)
|
||||
)
|
||||
if node.orelse:
|
||||
fors = f"{fors}\nelse:\n{self._stmt_list(node.orelse)}"
|
||||
return fors
|
||||
|
||||
def visit_importfrom(self, node):
|
||||
"""return an astroid.ImportFrom node as string"""
|
||||
return "from {} import {}".format(
|
||||
"." * (node.level or 0) + node.modname, _import_string(node.names)
|
||||
)
|
||||
|
||||
def visit_joinedstr(self, node):
    """Return an astroid.JoinedStr (f-string) node as string."""
    string = "".join(
        # Use repr on the string literal parts
        # to get proper escapes, e.g. \n, \\, \"
        # But strip the quotes off the ends
        # (they will always be one character: ' or ")
        repr(value.value)[1:-1]
        # Literal braces must be doubled to escape them
        .replace("{", "{{").replace("}", "}}")
        # Each value in values is either a string literal (Const)
        # or a FormattedValue
        if type(value).__name__ == "Const" else value.accept(self)
        for value in node.values
    )

    # Try to find surrounding quotes that don't appear at all in the string.
    # Because the formatted values inside {} can't contain backslash (\)
    # using a triple quote is sometimes necessary
    # NOTE(review): if every candidate quote occurs in the string, the loop
    # falls through and the last candidate ("'''") is used anyway, producing
    # invalid source — pre-existing limitation, left as-is.
    for quote in ("'", '"', '"""', "'''"):
        if quote not in string:
            break

    return "f" + quote + string + quote
|
||||
|
||||
def visit_formattedvalue(self, node):
|
||||
result = node.value.accept(self)
|
||||
if node.conversion and node.conversion >= 0:
|
||||
# e.g. if node.conversion == 114: result += "!r"
|
||||
result += "!" + chr(node.conversion)
|
||||
if node.format_spec:
|
||||
# The format spec is itself a JoinedString, i.e. an f-string
|
||||
# We strip the f and quotes of the ends
|
||||
result += ":" + node.format_spec.accept(self)[2:-1]
|
||||
return "{%s}" % result
|
||||
|
||||
def handle_functiondef(self, node, keyword):
    """return a (possibly async) function definition node as string

    ``keyword`` is the introducing keyword ("def" or "async def").
    """
    # Decorators (with trailing newline) and dedented docstring, if present.
    decorate = node.decorators.accept(self) if node.decorators else ""
    docs = self._docs_dedent(node.doc) if node.doc else ""
    trailer = ":"
    if node.returns:
        # NOTE(review): uses node.returns.as_string() rather than
        # node.returns.accept(self) like the rest of this visitor, so a
        # subclassed visitor would not see the return annotation — confirm
        # whether this is intentional.
        return_annotation = " -> " + node.returns.as_string()
        trailer = return_annotation + ":"
    def_format = "\n%s%s %s(%s)%s%s\n%s"
    return def_format % (
        decorate,
        keyword,
        node.name,
        node.args.accept(self),
        trailer,
        docs,
        self._stmt_list(node.body),
    )
|
||||
|
||||
def visit_functiondef(self, node):
|
||||
"""return an astroid.FunctionDef node as string"""
|
||||
return self.handle_functiondef(node, "def")
|
||||
|
||||
def visit_asyncfunctiondef(self, node):
|
||||
"""return an astroid.AsyncFunction node as string"""
|
||||
return self.handle_functiondef(node, "async def")
|
||||
|
||||
def visit_generatorexp(self, node):
|
||||
"""return an astroid.GeneratorExp node as string"""
|
||||
return "({} {})".format(
|
||||
node.elt.accept(self), " ".join(n.accept(self) for n in node.generators)
|
||||
)
|
||||
|
||||
def visit_attribute(self, node):
|
||||
"""return an astroid.Getattr node as string"""
|
||||
left = self._precedence_parens(node, node.expr)
|
||||
if left.isdigit():
|
||||
left = f"({left})"
|
||||
return f"{left}.{node.attrname}"
|
||||
|
||||
def visit_global(self, node):
|
||||
"""return an astroid.Global node as string"""
|
||||
return f"global {', '.join(node.names)}"
|
||||
|
||||
def visit_if(self, node):
|
||||
"""return an astroid.If node as string"""
|
||||
ifs = [f"if {node.test.accept(self)}:\n{self._stmt_list(node.body)}"]
|
||||
if node.has_elif_block():
|
||||
ifs.append(f"el{self._stmt_list(node.orelse, indent=False)}")
|
||||
elif node.orelse:
|
||||
ifs.append(f"else:\n{self._stmt_list(node.orelse)}")
|
||||
return "\n".join(ifs)
|
||||
|
||||
def visit_ifexp(self, node):
|
||||
"""return an astroid.IfExp node as string"""
|
||||
return "{} if {} else {}".format(
|
||||
self._precedence_parens(node, node.body, is_left=True),
|
||||
self._precedence_parens(node, node.test, is_left=True),
|
||||
self._precedence_parens(node, node.orelse, is_left=False),
|
||||
)
|
||||
|
||||
def visit_import(self, node):
|
||||
"""return an astroid.Import node as string"""
|
||||
return f"import {_import_string(node.names)}"
|
||||
|
||||
def visit_keyword(self, node):
|
||||
"""return an astroid.Keyword node as string"""
|
||||
if node.arg is None:
|
||||
return f"**{node.value.accept(self)}"
|
||||
return f"{node.arg}={node.value.accept(self)}"
|
||||
|
||||
def visit_lambda(self, node):
|
||||
"""return an astroid.Lambda node as string"""
|
||||
args = node.args.accept(self)
|
||||
body = node.body.accept(self)
|
||||
if args:
|
||||
return f"lambda {args}: {body}"
|
||||
|
||||
return f"lambda: {body}"
|
||||
|
||||
def visit_list(self, node):
|
||||
"""return an astroid.List node as string"""
|
||||
return f"[{', '.join(child.accept(self) for child in node.elts)}]"
|
||||
|
||||
def visit_listcomp(self, node):
|
||||
"""return an astroid.ListComp node as string"""
|
||||
return "[{} {}]".format(
|
||||
node.elt.accept(self), " ".join(n.accept(self) for n in node.generators)
|
||||
)
|
||||
|
||||
def visit_module(self, node):
|
||||
"""return an astroid.Module node as string"""
|
||||
docs = f'"""{node.doc}"""\n\n' if node.doc else ""
|
||||
return docs + "\n".join(n.accept(self) for n in node.body) + "\n\n"
|
||||
|
||||
def visit_name(self, node):
|
||||
"""return an astroid.Name node as string"""
|
||||
return node.name
|
||||
|
||||
def visit_namedexpr(self, node):
|
||||
"""Return an assignment expression node as string"""
|
||||
target = node.target.accept(self)
|
||||
value = node.value.accept(self)
|
||||
return f"{target} := {value}"
|
||||
|
||||
def visit_nonlocal(self, node):
|
||||
"""return an astroid.Nonlocal node as string"""
|
||||
return f"nonlocal {', '.join(node.names)}"
|
||||
|
||||
def visit_pass(self, node):
|
||||
"""return an astroid.Pass node as string"""
|
||||
return "pass"
|
||||
|
||||
def visit_raise(self, node):
    """Return an astroid.Raise node as string."""
    # Bare re-raise has neither an exception nor a cause.
    if not node.exc:
        return "raise"
    exc = node.exc.accept(self)
    if node.cause:
        # Explicit exception chaining: ``raise X from Y``.
        return f"raise {exc} from {node.cause.accept(self)}"
    return f"raise {exc}"
|
||||
|
||||
def visit_return(self, node):
    """Return an astroid.Return node as string."""
    # A multi-element tuple return is written without parentheses:
    # ``return a, b``.
    if node.is_tuple_return() and len(node.value.elts) > 1:
        return "return {}".format(
            ", ".join(child.accept(self) for child in node.value.elts)
        )
    if node.value:
        return f"return {node.value.accept(self)}"
    return "return"
|
||||
|
||||
def visit_set(self, node):
|
||||
"""return an astroid.Set node as string"""
|
||||
return "{%s}" % ", ".join(child.accept(self) for child in node.elts)
|
||||
|
||||
def visit_setcomp(self, node):
|
||||
"""return an astroid.SetComp node as string"""
|
||||
return "{{{} {}}}".format(
|
||||
node.elt.accept(self), " ".join(n.accept(self) for n in node.generators)
|
||||
)
|
||||
|
||||
def visit_slice(self, node):
    """Return an astroid.Slice node as string."""
    lower = node.lower.accept(self) if node.lower else ""
    upper = node.upper.accept(self) if node.upper else ""
    step = node.step.accept(self) if node.step else ""
    base = f"{lower}:{upper}"
    # The step segment is only appended when it renders to something.
    return f"{base}:{step}" if step else base
|
||||
|
||||
def visit_subscript(self, node):
|
||||
"""return an astroid.Subscript node as string"""
|
||||
idx = node.slice
|
||||
if idx.__class__.__name__.lower() == "index":
|
||||
idx = idx.value
|
||||
idxstr = idx.accept(self)
|
||||
if idx.__class__.__name__.lower() == "tuple" and idx.elts:
|
||||
# Remove parenthesis in tuple and extended slice.
|
||||
# a[(::1, 1:)] is not valid syntax.
|
||||
idxstr = idxstr[1:-1]
|
||||
return f"{self._precedence_parens(node, node.value)}[{idxstr}]"
|
||||
|
||||
def visit_tryexcept(self, node):
|
||||
"""return an astroid.TryExcept node as string"""
|
||||
trys = [f"try:\n{self._stmt_list(node.body)}"]
|
||||
for handler in node.handlers:
|
||||
trys.append(handler.accept(self))
|
||||
if node.orelse:
|
||||
trys.append(f"else:\n{self._stmt_list(node.orelse)}")
|
||||
return "\n".join(trys)
|
||||
|
||||
def visit_tryfinally(self, node):
|
||||
"""return an astroid.TryFinally node as string"""
|
||||
return "try:\n{}\nfinally:\n{}".format(
|
||||
self._stmt_list(node.body), self._stmt_list(node.finalbody)
|
||||
)
|
||||
|
||||
def visit_tuple(self, node):
    """Return an astroid.Tuple node as string."""
    elements = [child.accept(self) for child in node.elts]
    if len(elements) == 1:
        # A one-element tuple needs its trailing comma.
        return f"({elements[0]}, )"
    return f"({', '.join(elements)})"
|
||||
|
||||
def visit_unaryop(self, node):
    """Return an astroid.UnaryOp node as string."""
    # "not" needs a separating space; "+", "-" and "~" attach directly.
    operator = "not " if node.op == "not" else node.op
    return f"{operator}{self._precedence_parens(node, node.operand)}"
|
||||
|
||||
def visit_while(self, node):
|
||||
"""return an astroid.While node as string"""
|
||||
whiles = f"while {node.test.accept(self)}:\n{self._stmt_list(node.body)}"
|
||||
if node.orelse:
|
||||
whiles = f"{whiles}\nelse:\n{self._stmt_list(node.orelse)}"
|
||||
return whiles
|
||||
|
||||
def visit_with(self, node): # 'with' without 'as' is possible
|
||||
"""return an astroid.With node as string"""
|
||||
items = ", ".join(
|
||||
f"{expr.accept(self)}" + (v and f" as {v.accept(self)}" or "")
|
||||
for expr, v in node.items
|
||||
)
|
||||
return f"with {items}:\n{self._stmt_list(node.body)}"
|
||||
|
||||
def visit_yield(self, node):
|
||||
"""yield an ast.Yield node as string"""
|
||||
yi_val = (" " + node.value.accept(self)) if node.value else ""
|
||||
expr = "yield" + yi_val
|
||||
if node.parent.is_statement:
|
||||
return expr
|
||||
|
||||
return f"({expr})"
|
||||
|
||||
def visit_yieldfrom(self, node):
|
||||
"""Return an astroid.YieldFrom node as string."""
|
||||
yi_val = (" " + node.value.accept(self)) if node.value else ""
|
||||
expr = "yield from" + yi_val
|
||||
if node.parent.is_statement:
|
||||
return expr
|
||||
|
||||
return f"({expr})"
|
||||
|
||||
def visit_starred(self, node):
|
||||
"""return Starred node as string"""
|
||||
return "*" + node.value.accept(self)
|
||||
|
||||
def visit_match(self, node: "Match") -> str:
|
||||
"""Return an astroid.Match node as string."""
|
||||
return f"match {node.subject.accept(self)}:\n{self._stmt_list(node.cases)}"
|
||||
|
||||
def visit_matchcase(self, node: "MatchCase") -> str:
|
||||
"""Return an astroid.MatchCase node as string."""
|
||||
guard_str = f" if {node.guard.accept(self)}" if node.guard else ""
|
||||
return (
|
||||
f"case {node.pattern.accept(self)}{guard_str}:\n"
|
||||
f"{self._stmt_list(node.body)}"
|
||||
)
|
||||
|
||||
def visit_matchvalue(self, node: "MatchValue") -> str:
|
||||
"""Return an astroid.MatchValue node as string."""
|
||||
return node.value.accept(self)
|
||||
|
||||
@staticmethod
|
||||
def visit_matchsingleton(node: "MatchSingleton") -> str:
|
||||
"""Return an astroid.MatchSingleton node as string."""
|
||||
return str(node.value)
|
||||
|
||||
def visit_matchsequence(self, node: "MatchSequence") -> str:
|
||||
"""Return an astroid.MatchSequence node as string."""
|
||||
if node.patterns is None:
|
||||
return "[]"
|
||||
return f"[{', '.join(p.accept(self) for p in node.patterns)}]"
|
||||
|
||||
def visit_matchmapping(self, node: "MatchMapping") -> str:
|
||||
"""Return an astroid.MatchMapping node as string."""
|
||||
mapping_strings: List[str] = []
|
||||
if node.keys and node.patterns:
|
||||
mapping_strings.extend(
|
||||
f"{key.accept(self)}: {p.accept(self)}"
|
||||
for key, p in zip(node.keys, node.patterns)
|
||||
)
|
||||
if node.rest:
|
||||
mapping_strings.append(f"**{node.rest.accept(self)}")
|
||||
return f"{'{'}{', '.join(mapping_strings)}{'}'}"
|
||||
|
||||
def visit_matchclass(self, node: "MatchClass") -> str:
|
||||
"""Return an astroid.MatchClass node as string."""
|
||||
if node.cls is None:
|
||||
raise Exception(f"{node} does not have a 'cls' node")
|
||||
class_strings: List[str] = []
|
||||
if node.patterns:
|
||||
class_strings.extend(p.accept(self) for p in node.patterns)
|
||||
if node.kwd_attrs and node.kwd_patterns:
|
||||
for attr, pattern in zip(node.kwd_attrs, node.kwd_patterns):
|
||||
class_strings.append(f"{attr}={pattern.accept(self)}")
|
||||
return f"{node.cls.accept(self)}({', '.join(class_strings)})"
|
||||
|
||||
def visit_matchstar(self, node: "MatchStar") -> str:
|
||||
"""Return an astroid.MatchStar node as string."""
|
||||
return f"*{node.name.accept(self) if node.name else '_'}"
|
||||
|
||||
def visit_matchas(self, node: "MatchAs") -> str:
|
||||
"""Return an astroid.MatchAs node as string."""
|
||||
# pylint: disable=import-outside-toplevel
|
||||
# Prevent circular dependency
|
||||
from astroid.nodes.node_classes import MatchClass, MatchMapping, MatchSequence
|
||||
|
||||
if isinstance(node.parent, (MatchSequence, MatchMapping, MatchClass)):
|
||||
return node.name.accept(self) if node.name else "_"
|
||||
return (
|
||||
f"{node.pattern.accept(self) if node.pattern else '_'}"
|
||||
f"{f' as {node.name.accept(self)}' if node.name else ''}"
|
||||
)
|
||||
|
||||
def visit_matchor(self, node: "MatchOr") -> str:
|
||||
"""Return an astroid.MatchOr node as string."""
|
||||
if node.patterns is None:
|
||||
raise Exception(f"{node} does not have pattern nodes")
|
||||
return " | ".join(p.accept(self) for p in node.patterns)
|
||||
|
||||
# These aren't for real AST nodes, but for inference objects.
|
||||
|
||||
def visit_frozenset(self, node):
|
||||
return node.parent.accept(self)
|
||||
|
||||
def visit_super(self, node):
|
||||
return node.parent.accept(self)
|
||||
|
||||
def visit_uninferable(self, node):
|
||||
return str(node)
|
||||
|
||||
def visit_property(self, node):
|
||||
return node.function.accept(self)
|
||||
|
||||
def visit_evaluatedobject(self, node):
|
||||
return node.original.accept(self)
|
||||
|
||||
def visit_unknown(self, node: "Unknown") -> str:
|
||||
return str(node)
|
||||
|
||||
|
||||
def _import_string(names):
    """Return a list of (name, asname) pairs formatted as an import string."""
    formatted = []
    for name, asname in names:
        # Only aliased imports get the "as" clause.
        formatted.append(name if asname is None else f"{name} as {asname}")
    return ", ".join(formatted)
|
||||
|
||||
|
||||
# This sets the default indent to 4 spaces.
|
||||
to_code = AsStringVisitor(" ")
|
|
@ -1,27 +0,0 @@
|
|||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/master/LICENSE
|
||||
|
||||
|
||||
OP_PRECEDENCE = {
|
||||
op: precedence
|
||||
for precedence, ops in enumerate(
|
||||
[
|
||||
["Lambda"], # lambda x: x + 1
|
||||
["IfExp"], # 1 if True else 2
|
||||
["or"],
|
||||
["and"],
|
||||
["not"],
|
||||
["Compare"], # in, not in, is, is not, <, <=, >, >=, !=, ==
|
||||
["|"],
|
||||
["^"],
|
||||
["&"],
|
||||
["<<", ">>"],
|
||||
["+", "-"],
|
||||
["*", "@", "/", "//", "%"],
|
||||
["UnaryOp"], # +, -, ~
|
||||
["**"],
|
||||
["Await"],
|
||||
]
|
||||
)
|
||||
for op in ops
|
||||
}
|
File diff suppressed because it is too large
Load Diff
|
@ -1,789 +0,0 @@
|
|||
import pprint
|
||||
import sys
|
||||
import typing
|
||||
import warnings
|
||||
from functools import singledispatch as _singledispatch
|
||||
from typing import (
|
||||
TYPE_CHECKING,
|
||||
ClassVar,
|
||||
Iterator,
|
||||
List,
|
||||
Optional,
|
||||
Tuple,
|
||||
Type,
|
||||
TypeVar,
|
||||
Union,
|
||||
cast,
|
||||
overload,
|
||||
)
|
||||
|
||||
from astroid import decorators, util
|
||||
from astroid.exceptions import (
|
||||
AstroidError,
|
||||
InferenceError,
|
||||
ParentMissingError,
|
||||
StatementMissing,
|
||||
UseInferenceDefault,
|
||||
)
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.nodes.as_string import AsStringVisitor
|
||||
from astroid.nodes.const import OP_PRECEDENCE
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from astroid import nodes
|
||||
|
||||
if sys.version_info >= (3, 8):
|
||||
from typing import Literal
|
||||
else:
|
||||
from typing_extensions import Literal
|
||||
|
||||
|
||||
# Types for 'NodeNG.nodes_of_class()'
|
||||
T_Nodes = TypeVar("T_Nodes", bound="NodeNG")
|
||||
T_Nodes2 = TypeVar("T_Nodes2", bound="NodeNG")
|
||||
T_Nodes3 = TypeVar("T_Nodes3", bound="NodeNG")
|
||||
SkipKlassT = Union[None, Type["NodeNG"], Tuple[Type["NodeNG"], ...]]
|
||||
|
||||
|
||||
class NodeNG:
|
||||
"""A node of the new Abstract Syntax Tree (AST).
|
||||
|
||||
This is the base class for all Astroid node classes.
|
||||
"""
|
||||
|
||||
is_statement: ClassVar[bool] = False
|
||||
"""Whether this node indicates a statement."""
|
||||
optional_assign: ClassVar[
|
||||
bool
|
||||
] = False # True for For (and for Comprehension if py <3.0)
|
||||
"""Whether this node optionally assigns a variable.
|
||||
|
||||
This is for loop assignments because loop won't necessarily perform an
|
||||
assignment if the loop has no iterations.
|
||||
This is also the case from comprehensions in Python 2.
|
||||
"""
|
||||
is_function: ClassVar[bool] = False # True for FunctionDef nodes
|
||||
"""Whether this node indicates a function."""
|
||||
is_lambda: ClassVar[bool] = False
|
||||
|
||||
# Attributes below are set by the builder module or by raw factories
|
||||
_astroid_fields: ClassVar[typing.Tuple[str, ...]] = ()
|
||||
"""Node attributes that contain child nodes.
|
||||
|
||||
This is redefined in most concrete classes.
|
||||
"""
|
||||
_other_fields: ClassVar[typing.Tuple[str, ...]] = ()
|
||||
"""Node attributes that do not contain child nodes."""
|
||||
_other_other_fields: ClassVar[typing.Tuple[str, ...]] = ()
|
||||
"""Attributes that contain AST-dependent fields."""
|
||||
# instance specific inference function infer(node, context)
|
||||
_explicit_inference = None
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
lineno: Optional[int] = None,
|
||||
col_offset: Optional[int] = None,
|
||||
parent: Optional["NodeNG"] = None,
|
||||
*,
|
||||
end_lineno: Optional[int] = None,
|
||||
end_col_offset: Optional[int] = None,
|
||||
) -> None:
|
||||
"""
|
||||
:param lineno: The line that this node appears on in the source code.
|
||||
|
||||
:param col_offset: The column that this node appears on in the
|
||||
source code.
|
||||
|
||||
:param parent: The parent node in the syntax tree.
|
||||
|
||||
:param end_lineno: The last line this node appears on in the source code.
|
||||
|
||||
:param end_col_offset: The end column this node appears on in the
|
||||
source code. Note: This is after the last symbol.
|
||||
"""
|
||||
self.lineno: Optional[int] = lineno
|
||||
"""The line that this node appears on in the source code."""
|
||||
|
||||
self.col_offset: Optional[int] = col_offset
|
||||
"""The column that this node appears on in the source code."""
|
||||
|
||||
self.parent: Optional["NodeNG"] = parent
|
||||
"""The parent node in the syntax tree."""
|
||||
|
||||
self.end_lineno: Optional[int] = end_lineno
|
||||
"""The last line this node appears on in the source code."""
|
||||
|
||||
self.end_col_offset: Optional[int] = end_col_offset
|
||||
"""The end column this node appears on in the source code.
|
||||
Note: This is after the last symbol.
|
||||
"""
|
||||
|
||||
def infer(self, context=None, **kwargs):
    """Get a generator of the inferred values.

    This is the main entry point to the inference system.

    .. seealso:: :ref:`inference`

    If the instance has some explicit inference function set, it will be
    called instead of the default interface.

    :returns: The inferred values.
    :rtype: iterable
    """
    if context is not None:
        # Prefer any context the caller recorded specifically for this node.
        context = context.extra_context.get(self, context)
    if self._explicit_inference is not None:
        # explicit_inference is not bound, give it self explicitly
        try:
            # pylint: disable=not-callable
            results = list(self._explicit_inference(self, context, **kwargs))
            if context is not None:
                context.nodes_inferred += len(results)
            yield from results
            return
        except UseInferenceDefault:
            # The explicit hook declined; fall through to default inference.
            pass

    if not context:
        # No context means no caching or limiting is possible.
        # nodes_inferred?
        yield from self._infer(context, **kwargs)
        return

    # Results are cached per (node, lookup name, call context, bound node).
    key = (self, context.lookupname, context.callcontext, context.boundnode)
    if key in context.inferred:
        yield from context.inferred[key]
        return

    generator = self._infer(context, **kwargs)
    results = []

    # Limit inference amount to help with performance issues with
    # exponentially exploding possible results.
    limit = AstroidManager().max_inferable_values
    for i, result in enumerate(generator):
        if i >= limit or (context.nodes_inferred > context.max_inferred):
            # Signal truncation to the caller rather than looping forever.
            yield util.Uninferable
            break
        results.append(result)
        yield result
        context.nodes_inferred += 1

    # Cache generated results for subsequent inferences of the
    # same node using the same context
    context.inferred[key] = tuple(results)
    return
|
||||
|
||||
def _repr_name(self):
    """Get a name for nice representation.

    This is either :attr:`name`, :attr:`attrname`, or the empty string.

    :returns: The nice name.
    :rtype: str
    """
    # When "name"/"attrname" are child-node fields they are AST nodes,
    # not strings, so they cannot be used directly in a repr.
    if "name" in self._astroid_fields or "attrname" in self._astroid_fields:
        return ""
    return getattr(self, "name", "") or getattr(self, "attrname", "")
|
||||
|
||||
def __str__(self):
    """Multi-line representation: class name, optional node name, and all
    field values pretty-printed and aligned under the opening parenthesis.
    """
    rname = self._repr_name()
    cname = type(self).__name__
    if rname:
        string = "%(cname)s.%(rname)s(%(fields)s)"
        # Alignment column: past "Cname.rname(".
        alignment = len(cname) + len(rname) + 2
    else:
        string = "%(cname)s(%(fields)s)"
        alignment = len(cname) + 1
    result = []
    for field in self._other_fields + self._astroid_fields:
        value = getattr(self, field)
        # Leave room for "field=" plus the alignment indent within 80 cols.
        width = 80 - len(field) - alignment
        lines = pprint.pformat(value, indent=2, width=width).splitlines(True)

        # Indent continuation lines so the value lines up under its first line.
        inner = [lines[0]]
        for line in lines[1:]:
            inner.append(" " * alignment + line)
        result.append(f"{field}={''.join(inner)}")

    return string % {
        "cname": cname,
        "rname": rname,
        "fields": (",\n" + " " * alignment).join(result),
    }
|
||||
|
||||
def __repr__(self):
    """Short angle-bracket representation: class, optional name, line, id."""
    rname = self._repr_name()
    cname = type(self).__name__
    label = f"{cname}.{rname}" if rname else cname
    return f"<{label} l.{self.fromlineno} at 0x{id(self):x}>"
|
||||
|
||||
def accept(self, visitor):
    """Visit this node using the given visitor.

    Dispatches to the visitor method named after this node's class,
    e.g. ``visit_functiondef`` for a FunctionDef node.
    """
    method_name = "visit_" + type(self).__name__.lower()
    return getattr(visitor, method_name)(self)
|
||||
|
||||
def get_children(self) -> Iterator["NodeNG"]:
    """Yield every child node stored in this node's AST fields, in order."""
    for field in self._astroid_fields:
        value = getattr(self, field)
        if value is None:
            # Optional child that is absent.
            continue
        if isinstance(value, (list, tuple)):
            yield from value
        else:
            yield value
|
||||
|
||||
def last_child(self) -> Optional["NodeNG"]:
    """An optimized version of list(get_children())[-1].

    Walks the AST fields backwards and returns the last child of the
    first non-empty field, or None when there are no children.
    """
    for field in reversed(self._astroid_fields):
        value = getattr(self, field)
        if not value:  # None or an empty list/tuple
            continue
        return value[-1] if isinstance(value, (list, tuple)) else value
    return None
|
||||
|
||||
def node_ancestors(self) -> Iterator["NodeNG"]:
    """Yield parent, grandparent, etc until there are no more."""
    current = self.parent
    while current is not None:
        yield current
        current = current.parent
|
||||
|
||||
def parent_of(self, node):
    """Check if this node is the parent of the given node.

    :param node: The node to check if it is the child.
    :type node: NodeNG

    :returns: True if this node is a (possibly indirect) parent of the
        given node, False otherwise.
    :rtype: bool
    """
    for ancestor in node.node_ancestors():
        if ancestor is self:
            return True
    return False
|
||||
|
||||
@overload
def statement(
    self, *, future: Literal[None] = ...
) -> Union["nodes.Statement", "nodes.Module"]:
    ...

@overload
def statement(self, *, future: Literal[True]) -> "nodes.Statement":
    ...

def statement(
    self, *, future: Literal[None, True] = None
) -> Union["nodes.Statement", "nodes.Module"]:
    """The first parent node, including self, marked as statement node.

    TODO: Deprecate the future parameter and only raise StatementMissing and return
    nodes.Statement

    :param future: If True, opt into the astroid 3.0 behaviour of raising
        StatementMissing instead of AttributeError when no parent exists.
    :raises AttributeError: If self has no parent attribute
    :raises StatementMissing: If self has no parent attribute and future is True
    """
    if self.is_statement:
        # This node is itself a statement.
        return cast("nodes.Statement", self)
    if not self.parent:
        if future:
            raise StatementMissing(target=self)
        # Legacy behaviour: warn about the upcoming change, then raise
        # AttributeError as callers historically expect.
        warnings.warn(
            "In astroid 3.0.0 NodeNG.statement() will return either a nodes.Statement "
            "or raise a StatementMissing exception. AttributeError will no longer be raised. "
            "This behaviour can already be triggered "
            "by passing 'future=True' to a statement() call.",
            DeprecationWarning,
        )
        raise AttributeError(f"{self} object has no attribute 'parent'")
    # Recurse upwards until a statement node is reached.
    return self.parent.statement(future=future)
|
||||
|
||||
def frame(
    self, *, future: Literal[None, True] = None
) -> Union["nodes.FunctionDef", "nodes.Module", "nodes.ClassDef", "nodes.Lambda"]:
    """The first parent frame node.

    A frame node is a :class:`Module`, :class:`FunctionDef`,
    :class:`ClassDef` or :class:`Lambda`.

    :param future: If True, raise ParentMissingError instead of
        AttributeError when this node has no parent (astroid 3.0 behaviour).
    :returns: The first parent frame node.
    :raises AttributeError: If self has no parent (and future is not True).
    :raises ParentMissingError: If self has no parent and future is True.
    """
    if self.parent is None:
        if future:
            raise ParentMissingError(target=self)
        # Legacy behaviour: warn about the upcoming change, then raise
        # AttributeError as callers historically expect.
        warnings.warn(
            "In astroid 3.0.0 NodeNG.frame() will return either a Frame node, "
            "or raise ParentMissingError. AttributeError will no longer be raised. "
            "This behaviour can already be triggered "
            "by passing 'future=True' to a frame() call.",
            DeprecationWarning,
        )
        raise AttributeError(f"{self} object has no attribute 'parent'")

    # Delegate upwards; frame-type parents terminate the recursion by
    # overriding frame() elsewhere in the project.
    return self.parent.frame(future=future)
|
||||
|
||||
def scope(self) -> "nodes.LocalsDictNodeNG":
    """The first parent node defining a new scope.

    These can be Module, FunctionDef, ClassDef, Lambda, or GeneratorExp nodes.

    :returns: The first parent scope node.
    :raises ParentMissingError: If this node has no parent.
    """
    if not self.parent:
        raise ParentMissingError(target=self)
    # Scope-defining parents terminate the recursion by overriding scope().
    return self.parent.scope()
|
||||
|
||||
def root(self):
|
||||
"""Return the root node of the syntax tree.
|
||||
|
||||
:returns: The root node.
|
||||
:rtype: Module
|
||||
"""
|
||||
if self.parent:
|
||||
return self.parent.root()
|
||||
return self
|
||||
|
||||
def child_sequence(self, child):
|
||||
"""Search for the sequence that contains this child.
|
||||
|
||||
:param child: The child node to search sequences for.
|
||||
:type child: NodeNG
|
||||
|
||||
:returns: The sequence containing the given child node.
|
||||
:rtype: iterable(NodeNG)
|
||||
|
||||
:raises AstroidError: If no sequence could be found that contains
|
||||
the given child.
|
||||
"""
|
||||
for field in self._astroid_fields:
|
||||
node_or_sequence = getattr(self, field)
|
||||
if node_or_sequence is child:
|
||||
return [node_or_sequence]
|
||||
# /!\ compiler.ast Nodes have an __iter__ walking over child nodes
|
||||
if (
|
||||
isinstance(node_or_sequence, (tuple, list))
|
||||
and child in node_or_sequence
|
||||
):
|
||||
return node_or_sequence
|
||||
|
||||
msg = "Could not find %s in %s's children"
|
||||
raise AstroidError(msg % (repr(child), repr(self)))
|
||||
|
||||
def locate_child(self, child):
|
||||
"""Find the field of this node that contains the given child.
|
||||
|
||||
:param child: The child node to search fields for.
|
||||
:type child: NodeNG
|
||||
|
||||
:returns: A tuple of the name of the field that contains the child,
|
||||
and the sequence or node that contains the child node.
|
||||
:rtype: tuple(str, iterable(NodeNG) or NodeNG)
|
||||
|
||||
:raises AstroidError: If no field could be found that contains
|
||||
the given child.
|
||||
"""
|
||||
for field in self._astroid_fields:
|
||||
node_or_sequence = getattr(self, field)
|
||||
# /!\ compiler.ast Nodes have an __iter__ walking over child nodes
|
||||
if child is node_or_sequence:
|
||||
return field, child
|
||||
if (
|
||||
isinstance(node_or_sequence, (tuple, list))
|
||||
and child in node_or_sequence
|
||||
):
|
||||
return field, node_or_sequence
|
||||
msg = "Could not find %s in %s's children"
|
||||
raise AstroidError(msg % (repr(child), repr(self)))
|
||||
|
||||
# FIXME : should we merge child_sequence and locate_child ? locate_child
|
||||
# is only used in are_exclusive, child_sequence one time in pylint.
|
||||
|
||||
def next_sibling(self):
|
||||
"""The next sibling statement node.
|
||||
|
||||
:returns: The next sibling statement node.
|
||||
:rtype: NodeNG or None
|
||||
"""
|
||||
return self.parent.next_sibling()
|
||||
|
||||
def previous_sibling(self):
|
||||
"""The previous sibling statement.
|
||||
|
||||
:returns: The previous sibling statement node.
|
||||
:rtype: NodeNG or None
|
||||
"""
|
||||
return self.parent.previous_sibling()
|
||||
|
||||
# these are lazy because they're relatively expensive to compute for every
|
||||
# single node, and they rarely get looked at
|
||||
|
||||
@decorators.cachedproperty
|
||||
def fromlineno(self) -> Optional[int]:
|
||||
"""The first line that this node appears on in the source code."""
|
||||
if self.lineno is None:
|
||||
return self._fixed_source_line()
|
||||
return self.lineno
|
||||
|
||||
@decorators.cachedproperty
|
||||
def tolineno(self) -> Optional[int]:
|
||||
"""The last line that this node appears on in the source code."""
|
||||
if not self._astroid_fields:
|
||||
# can't have children
|
||||
last_child = None
|
||||
else:
|
||||
last_child = self.last_child()
|
||||
if last_child is None:
|
||||
return self.fromlineno
|
||||
return last_child.tolineno
|
||||
|
||||
def _fixed_source_line(self) -> Optional[int]:
|
||||
"""Attempt to find the line that this node appears on.
|
||||
|
||||
We need this method since not all nodes have :attr:`lineno` set.
|
||||
"""
|
||||
line = self.lineno
|
||||
_node: Optional[NodeNG] = self
|
||||
try:
|
||||
while line is None:
|
||||
_node = next(_node.get_children())
|
||||
line = _node.lineno
|
||||
except StopIteration:
|
||||
_node = self.parent
|
||||
while _node and line is None:
|
||||
line = _node.lineno
|
||||
_node = _node.parent
|
||||
return line
|
||||
|
||||
def block_range(self, lineno):
|
||||
"""Get a range from the given line number to where this node ends.
|
||||
|
||||
:param lineno: The line number to start the range at.
|
||||
:type lineno: int
|
||||
|
||||
:returns: The range of line numbers that this node belongs to,
|
||||
starting at the given line number.
|
||||
:rtype: tuple(int, int or None)
|
||||
"""
|
||||
return lineno, self.tolineno
|
||||
|
||||
def set_local(self, name, stmt):
|
||||
"""Define that the given name is declared in the given statement node.
|
||||
|
||||
This definition is stored on the parent scope node.
|
||||
|
||||
.. seealso:: :meth:`scope`
|
||||
|
||||
:param name: The name that is being defined.
|
||||
:type name: str
|
||||
|
||||
:param stmt: The statement that defines the given name.
|
||||
:type stmt: NodeNG
|
||||
"""
|
||||
self.parent.set_local(name, stmt)
|
||||
|
||||
@overload
|
||||
def nodes_of_class(
|
||||
self,
|
||||
klass: Type[T_Nodes],
|
||||
skip_klass: SkipKlassT = None,
|
||||
) -> Iterator[T_Nodes]:
|
||||
...
|
||||
|
||||
@overload
|
||||
def nodes_of_class(
|
||||
self,
|
||||
klass: Tuple[Type[T_Nodes], Type[T_Nodes2]],
|
||||
skip_klass: SkipKlassT = None,
|
||||
) -> Union[Iterator[T_Nodes], Iterator[T_Nodes2]]:
|
||||
...
|
||||
|
||||
@overload
|
||||
def nodes_of_class(
|
||||
self,
|
||||
klass: Tuple[Type[T_Nodes], Type[T_Nodes2], Type[T_Nodes3]],
|
||||
skip_klass: SkipKlassT = None,
|
||||
) -> Union[Iterator[T_Nodes], Iterator[T_Nodes2], Iterator[T_Nodes3]]:
|
||||
...
|
||||
|
||||
@overload
|
||||
def nodes_of_class(
|
||||
self,
|
||||
klass: Tuple[Type[T_Nodes], ...],
|
||||
skip_klass: SkipKlassT = None,
|
||||
) -> Iterator[T_Nodes]:
|
||||
...
|
||||
|
||||
def nodes_of_class( # type: ignore[misc] # mypy doesn't correctly recognize the overloads
|
||||
self,
|
||||
klass: Union[
|
||||
Type[T_Nodes],
|
||||
Tuple[Type[T_Nodes], Type[T_Nodes2]],
|
||||
Tuple[Type[T_Nodes], Type[T_Nodes2], Type[T_Nodes3]],
|
||||
Tuple[Type[T_Nodes], ...],
|
||||
],
|
||||
skip_klass: SkipKlassT = None,
|
||||
) -> Union[Iterator[T_Nodes], Iterator[T_Nodes2], Iterator[T_Nodes3]]:
|
||||
"""Get the nodes (including this one or below) of the given types.
|
||||
|
||||
:param klass: The types of node to search for.
|
||||
|
||||
:param skip_klass: The types of node to ignore. This is useful to ignore
|
||||
subclasses of :attr:`klass`.
|
||||
|
||||
:returns: The node of the given types.
|
||||
"""
|
||||
if isinstance(self, klass):
|
||||
yield self
|
||||
|
||||
if skip_klass is None:
|
||||
for child_node in self.get_children():
|
||||
yield from child_node.nodes_of_class(klass, skip_klass)
|
||||
|
||||
return
|
||||
|
||||
for child_node in self.get_children():
|
||||
if isinstance(child_node, skip_klass):
|
||||
continue
|
||||
yield from child_node.nodes_of_class(klass, skip_klass)
|
||||
|
||||
@decorators.cached
|
||||
def _get_assign_nodes(self):
|
||||
return []
|
||||
|
||||
def _get_name_nodes(self):
|
||||
for child_node in self.get_children():
|
||||
yield from child_node._get_name_nodes()
|
||||
|
||||
def _get_return_nodes_skip_functions(self):
|
||||
yield from ()
|
||||
|
||||
def _get_yield_nodes_skip_lambdas(self):
|
||||
yield from ()
|
||||
|
||||
def _infer_name(self, frame, name):
|
||||
# overridden for ImportFrom, Import, Global, TryExcept and Arguments
|
||||
pass
|
||||
|
||||
def _infer(self, context=None):
|
||||
"""we don't know how to resolve a statement by default"""
|
||||
# this method is overridden by most concrete classes
|
||||
raise InferenceError(
|
||||
"No inference function for {node!r}.", node=self, context=context
|
||||
)
|
||||
|
||||
def inferred(self):
|
||||
"""Get a list of the inferred values.
|
||||
|
||||
.. seealso:: :ref:`inference`
|
||||
|
||||
:returns: The inferred values.
|
||||
:rtype: list
|
||||
"""
|
||||
return list(self.infer())
|
||||
|
||||
def instantiate_class(self):
|
||||
"""Instantiate an instance of the defined class.
|
||||
|
||||
.. note::
|
||||
|
||||
On anything other than a :class:`ClassDef` this will return self.
|
||||
|
||||
:returns: An instance of the defined class.
|
||||
:rtype: object
|
||||
"""
|
||||
return self
|
||||
|
||||
def has_base(self, node):
|
||||
"""Check if this node inherits from the given type.
|
||||
|
||||
:param node: The node defining the base to look for.
|
||||
Usually this is a :class:`Name` node.
|
||||
:type node: NodeNG
|
||||
"""
|
||||
return False
|
||||
|
||||
def callable(self):
|
||||
"""Whether this node defines something that is callable.
|
||||
|
||||
:returns: True if this defines something that is callable,
|
||||
False otherwise.
|
||||
:rtype: bool
|
||||
"""
|
||||
return False
|
||||
|
||||
def eq(self, value):
|
||||
return False
|
||||
|
||||
def as_string(self) -> str:
|
||||
"""Get the source code that this node represents."""
|
||||
return AsStringVisitor()(self)
|
||||
|
||||
def repr_tree(
|
||||
self,
|
||||
ids=False,
|
||||
include_linenos=False,
|
||||
ast_state=False,
|
||||
indent=" ",
|
||||
max_depth=0,
|
||||
max_width=80,
|
||||
) -> str:
|
||||
"""Get a string representation of the AST from this node.
|
||||
|
||||
:param ids: If true, includes the ids with the node type names.
|
||||
:type ids: bool
|
||||
|
||||
:param include_linenos: If true, includes the line numbers and
|
||||
column offsets.
|
||||
:type include_linenos: bool
|
||||
|
||||
:param ast_state: If true, includes information derived from
|
||||
the whole AST like local and global variables.
|
||||
:type ast_state: bool
|
||||
|
||||
:param indent: A string to use to indent the output string.
|
||||
:type indent: str
|
||||
|
||||
:param max_depth: If set to a positive integer, won't return
|
||||
nodes deeper than max_depth in the string.
|
||||
:type max_depth: int
|
||||
|
||||
:param max_width: Attempt to format the output string to stay
|
||||
within this number of characters, but can exceed it under some
|
||||
circumstances. Only positive integer values are valid, the default is 80.
|
||||
:type max_width: int
|
||||
|
||||
:returns: The string representation of the AST.
|
||||
:rtype: str
|
||||
"""
|
||||
|
||||
@_singledispatch
|
||||
def _repr_tree(node, result, done, cur_indent="", depth=1):
|
||||
"""Outputs a representation of a non-tuple/list, non-node that's
|
||||
contained within an AST, including strings.
|
||||
"""
|
||||
lines = pprint.pformat(
|
||||
node, width=max(max_width - len(cur_indent), 1)
|
||||
).splitlines(True)
|
||||
result.append(lines[0])
|
||||
result.extend([cur_indent + line for line in lines[1:]])
|
||||
return len(lines) != 1
|
||||
|
||||
# pylint: disable=unused-variable,useless-suppression; doesn't understand singledispatch
|
||||
@_repr_tree.register(tuple)
|
||||
@_repr_tree.register(list)
|
||||
def _repr_seq(node, result, done, cur_indent="", depth=1):
|
||||
"""Outputs a representation of a sequence that's contained within an AST."""
|
||||
cur_indent += indent
|
||||
result.append("[")
|
||||
if not node:
|
||||
broken = False
|
||||
elif len(node) == 1:
|
||||
broken = _repr_tree(node[0], result, done, cur_indent, depth)
|
||||
elif len(node) == 2:
|
||||
broken = _repr_tree(node[0], result, done, cur_indent, depth)
|
||||
if not broken:
|
||||
result.append(", ")
|
||||
else:
|
||||
result.append(",\n")
|
||||
result.append(cur_indent)
|
||||
broken = _repr_tree(node[1], result, done, cur_indent, depth) or broken
|
||||
else:
|
||||
result.append("\n")
|
||||
result.append(cur_indent)
|
||||
for child in node[:-1]:
|
||||
_repr_tree(child, result, done, cur_indent, depth)
|
||||
result.append(",\n")
|
||||
result.append(cur_indent)
|
||||
_repr_tree(node[-1], result, done, cur_indent, depth)
|
||||
broken = True
|
||||
result.append("]")
|
||||
return broken
|
||||
|
||||
# pylint: disable=unused-variable,useless-suppression; doesn't understand singledispatch
|
||||
@_repr_tree.register(NodeNG)
|
||||
def _repr_node(node, result, done, cur_indent="", depth=1):
|
||||
"""Outputs a strings representation of an astroid node."""
|
||||
if node in done:
|
||||
result.append(
|
||||
indent + f"<Recursion on {type(node).__name__} with id={id(node)}"
|
||||
)
|
||||
return False
|
||||
done.add(node)
|
||||
|
||||
if max_depth and depth > max_depth:
|
||||
result.append("...")
|
||||
return False
|
||||
depth += 1
|
||||
cur_indent += indent
|
||||
if ids:
|
||||
result.append(f"{type(node).__name__}<0x{id(node):x}>(\n")
|
||||
else:
|
||||
result.append(f"{type(node).__name__}(")
|
||||
fields = []
|
||||
if include_linenos:
|
||||
fields.extend(("lineno", "col_offset"))
|
||||
fields.extend(node._other_fields)
|
||||
fields.extend(node._astroid_fields)
|
||||
if ast_state:
|
||||
fields.extend(node._other_other_fields)
|
||||
if not fields:
|
||||
broken = False
|
||||
elif len(fields) == 1:
|
||||
result.append(f"{fields[0]}=")
|
||||
broken = _repr_tree(
|
||||
getattr(node, fields[0]), result, done, cur_indent, depth
|
||||
)
|
||||
else:
|
||||
result.append("\n")
|
||||
result.append(cur_indent)
|
||||
for field in fields[:-1]:
|
||||
result.append(f"{field}=")
|
||||
_repr_tree(getattr(node, field), result, done, cur_indent, depth)
|
||||
result.append(",\n")
|
||||
result.append(cur_indent)
|
||||
result.append(f"{fields[-1]}=")
|
||||
_repr_tree(getattr(node, fields[-1]), result, done, cur_indent, depth)
|
||||
broken = True
|
||||
result.append(")")
|
||||
return broken
|
||||
|
||||
result: List[str] = []
|
||||
_repr_tree(self, result, set())
|
||||
return "".join(result)
|
||||
|
||||
def bool_value(self, context=None):
|
||||
"""Determine the boolean value of this node.
|
||||
|
||||
The boolean value of a node can have three
|
||||
possible values:
|
||||
|
||||
* False: For instance, empty data structures,
|
||||
False, empty strings, instances which return
|
||||
explicitly False from the __nonzero__ / __bool__
|
||||
method.
|
||||
* True: Most of constructs are True by default:
|
||||
classes, functions, modules etc
|
||||
* Uninferable: The inference engine is uncertain of the
|
||||
node's value.
|
||||
|
||||
:returns: The boolean value of this node.
|
||||
:rtype: bool or Uninferable
|
||||
"""
|
||||
return util.Uninferable
|
||||
|
||||
def op_precedence(self):
|
||||
# Look up by class name or default to highest precedence
|
||||
return OP_PRECEDENCE.get(self.__class__.__name__, len(OP_PRECEDENCE))
|
||||
|
||||
def op_left_associative(self):
|
||||
# Everything is left associative except `**` and IfExp
|
||||
return True
|
|
@ -1,43 +0,0 @@
|
|||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
|
||||
"""This module contains all classes that are considered a "scoped" node and anything related.
|
||||
A scope node is a node that opens a new local scope in the language definition:
|
||||
Module, ClassDef, FunctionDef (and Lambda, GeneratorExp, DictComp and SetComp to some extent).
|
||||
"""
|
||||
from astroid.nodes.scoped_nodes.scoped_nodes import (
|
||||
AsyncFunctionDef,
|
||||
ClassDef,
|
||||
ComprehensionScope,
|
||||
DictComp,
|
||||
FunctionDef,
|
||||
GeneratorExp,
|
||||
Lambda,
|
||||
ListComp,
|
||||
LocalsDictNodeNG,
|
||||
Module,
|
||||
SetComp,
|
||||
_is_metaclass,
|
||||
builtin_lookup,
|
||||
function_to_method,
|
||||
get_wrapping_class,
|
||||
)
|
||||
|
||||
__all__ = (
|
||||
"AsyncFunctionDef",
|
||||
"ClassDef",
|
||||
"ComprehensionScope",
|
||||
"DictComp",
|
||||
"FunctionDef",
|
||||
"GeneratorExp",
|
||||
"Lambda",
|
||||
"ListComp",
|
||||
"LocalsDictNodeNG",
|
||||
"Module",
|
||||
"SetComp",
|
||||
"builtin_lookup",
|
||||
"function_to_method",
|
||||
"get_wrapping_class",
|
||||
"_is_metaclass",
|
||||
)
|
File diff suppressed because it is too large
Load Diff
|
@ -1,326 +0,0 @@
|
|||
# Copyright (c) 2015-2016, 2018-2020 Claudiu Popa <pcmanticore@gmail.com>
|
||||
# Copyright (c) 2015-2016 Ceridwen <ceridwenv@gmail.com>
|
||||
# Copyright (c) 2015 Florian Bruhin <me@the-compiler.org>
|
||||
# Copyright (c) 2016 Derek Gustafson <degustaf@gmail.com>
|
||||
# Copyright (c) 2018 hippo91 <guillaume.peillex@gmail.com>
|
||||
# Copyright (c) 2018 Bryce Guinta <bryce.paul.guinta@gmail.com>
|
||||
# Copyright (c) 2021 Pierre Sassoulas <pierre.sassoulas@gmail.com>
|
||||
# Copyright (c) 2021 Craig Franklin <craigjfranklin@gmail.com>
|
||||
# Copyright (c) 2021 Alphadelta14 <alpha@alphaservcomputing.solutions>
|
||||
# Copyright (c) 2021 Marc Mueller <30130371+cdce8p@users.noreply.github.com>
|
||||
|
||||
# Licensed under the LGPL: https://www.gnu.org/licenses/old-licenses/lgpl-2.1.en.html
|
||||
# For details: https://github.com/PyCQA/astroid/blob/main/LICENSE
|
||||
|
||||
|
||||
"""
|
||||
Inference objects are a way to represent composite AST nodes,
|
||||
which are used only as inference results, so they can't be found in the
|
||||
original AST tree. For instance, inferring the following frozenset use,
|
||||
leads to an inferred FrozenSet:
|
||||
|
||||
Call(func=Name('frozenset'), args=Tuple(...))
|
||||
"""
|
||||
|
||||
|
||||
from astroid import bases, decorators, util
|
||||
from astroid.exceptions import (
|
||||
AttributeInferenceError,
|
||||
InferenceError,
|
||||
MroError,
|
||||
SuperError,
|
||||
)
|
||||
from astroid.manager import AstroidManager
|
||||
from astroid.nodes import node_classes, scoped_nodes
|
||||
|
||||
objectmodel = util.lazy_import("interpreter.objectmodel")
|
||||
|
||||
|
||||
class FrozenSet(node_classes.BaseContainer):
|
||||
"""class representing a FrozenSet composite node"""
|
||||
|
||||
def pytype(self):
|
||||
return "builtins.frozenset"
|
||||
|
||||
def _infer(self, context=None):
|
||||
yield self
|
||||
|
||||
@decorators.cachedproperty
|
||||
def _proxied(self): # pylint: disable=method-hidden
|
||||
ast_builtins = AstroidManager().builtins_module
|
||||
return ast_builtins.getattr("frozenset")[0]
|
||||
|
||||
|
||||
class Super(node_classes.NodeNG):
|
||||
"""Proxy class over a super call.
|
||||
|
||||
This class offers almost the same behaviour as Python's super,
|
||||
which is MRO lookups for retrieving attributes from the parents.
|
||||
|
||||
The *mro_pointer* is the place in the MRO from where we should
|
||||
start looking, not counting it. *mro_type* is the object which
|
||||
provides the MRO, it can be both a type or an instance.
|
||||
*self_class* is the class where the super call is, while
|
||||
*scope* is the function where the super call is.
|
||||
"""
|
||||
|
||||
# pylint: disable=unnecessary-lambda
|
||||
special_attributes = util.lazy_descriptor(lambda: objectmodel.SuperModel())
|
||||
|
||||
def __init__(self, mro_pointer, mro_type, self_class, scope):
|
||||
self.type = mro_type
|
||||
self.mro_pointer = mro_pointer
|
||||
self._class_based = False
|
||||
self._self_class = self_class
|
||||
self._scope = scope
|
||||
super().__init__()
|
||||
|
||||
def _infer(self, context=None):
|
||||
yield self
|
||||
|
||||
def super_mro(self):
|
||||
"""Get the MRO which will be used to lookup attributes in this super."""
|
||||
if not isinstance(self.mro_pointer, scoped_nodes.ClassDef):
|
||||
raise SuperError(
|
||||
"The first argument to super must be a subtype of "
|
||||
"type, not {mro_pointer}.",
|
||||
super_=self,
|
||||
)
|
||||
|
||||
if isinstance(self.type, scoped_nodes.ClassDef):
|
||||
# `super(type, type)`, most likely in a class method.
|
||||
self._class_based = True
|
||||
mro_type = self.type
|
||||
else:
|
||||
mro_type = getattr(self.type, "_proxied", None)
|
||||
if not isinstance(mro_type, (bases.Instance, scoped_nodes.ClassDef)):
|
||||
raise SuperError(
|
||||
"The second argument to super must be an "
|
||||
"instance or subtype of type, not {type}.",
|
||||
super_=self,
|
||||
)
|
||||
|
||||
if not mro_type.newstyle:
|
||||
raise SuperError("Unable to call super on old-style classes.", super_=self)
|
||||
|
||||
mro = mro_type.mro()
|
||||
if self.mro_pointer not in mro:
|
||||
raise SuperError(
|
||||
"The second argument to super must be an "
|
||||
"instance or subtype of type, not {type}.",
|
||||
super_=self,
|
||||
)
|
||||
|
||||
index = mro.index(self.mro_pointer)
|
||||
return mro[index + 1 :]
|
||||
|
||||
@decorators.cachedproperty
|
||||
def _proxied(self):
|
||||
ast_builtins = AstroidManager().builtins_module
|
||||
return ast_builtins.getattr("super")[0]
|
||||
|
||||
def pytype(self):
|
||||
return "builtins.super"
|
||||
|
||||
def display_type(self):
|
||||
return "Super of"
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
"""Get the name of the MRO pointer."""
|
||||
return self.mro_pointer.name
|
||||
|
||||
def qname(self):
|
||||
return "super"
|
||||
|
||||
def igetattr(self, name, context=None):
|
||||
"""Retrieve the inferred values of the given attribute name."""
|
||||
|
||||
if name in self.special_attributes:
|
||||
yield self.special_attributes.lookup(name)
|
||||
return
|
||||
|
||||
try:
|
||||
mro = self.super_mro()
|
||||
# Don't let invalid MROs or invalid super calls
|
||||
# leak out as is from this function.
|
||||
except SuperError as exc:
|
||||
raise AttributeInferenceError(
|
||||
(
|
||||
"Lookup for {name} on {target!r} because super call {super!r} "
|
||||
"is invalid."
|
||||
),
|
||||
target=self,
|
||||
attribute=name,
|
||||
context=context,
|
||||
super_=exc.super_,
|
||||
) from exc
|
||||
except MroError as exc:
|
||||
raise AttributeInferenceError(
|
||||
(
|
||||
"Lookup for {name} on {target!r} failed because {cls!r} has an "
|
||||
"invalid MRO."
|
||||
),
|
||||
target=self,
|
||||
attribute=name,
|
||||
context=context,
|
||||
mros=exc.mros,
|
||||
cls=exc.cls,
|
||||
) from exc
|
||||
found = False
|
||||
for cls in mro:
|
||||
if name not in cls.locals:
|
||||
continue
|
||||
|
||||
found = True
|
||||
for inferred in bases._infer_stmts([cls[name]], context, frame=self):
|
||||
if not isinstance(inferred, scoped_nodes.FunctionDef):
|
||||
yield inferred
|
||||
continue
|
||||
|
||||
# We can obtain different descriptors from a super depending
|
||||
# on what we are accessing and where the super call is.
|
||||
if inferred.type == "classmethod":
|
||||
yield bases.BoundMethod(inferred, cls)
|
||||
elif self._scope.type == "classmethod" and inferred.type == "method":
|
||||
yield inferred
|
||||
elif self._class_based or inferred.type == "staticmethod":
|
||||
yield inferred
|
||||
elif isinstance(inferred, Property):
|
||||
function = inferred.function
|
||||
try:
|
||||
yield from function.infer_call_result(
|
||||
caller=self, context=context
|
||||
)
|
||||
except InferenceError:
|
||||
yield util.Uninferable
|
||||
elif bases._is_property(inferred):
|
||||
# TODO: support other descriptors as well.
|
||||
try:
|
||||
yield from inferred.infer_call_result(self, context)
|
||||
except InferenceError:
|
||||
yield util.Uninferable
|
||||
else:
|
||||
yield bases.BoundMethod(inferred, cls)
|
||||
|
||||
if not found:
|
||||
raise AttributeInferenceError(target=self, attribute=name, context=context)
|
||||
|
||||
def getattr(self, name, context=None):
|
||||
return list(self.igetattr(name, context=context))
|
||||
|
||||
|
||||
class ExceptionInstance(bases.Instance):
|
||||
"""Class for instances of exceptions
|
||||
|
||||
It has special treatment for some of the exceptions's attributes,
|
||||
which are transformed at runtime into certain concrete objects, such as
|
||||
the case of .args.
|
||||
"""
|
||||
|
||||
@decorators.cachedproperty
|
||||
def special_attributes(self):
|
||||
qname = self.qname()
|
||||
instance = objectmodel.BUILTIN_EXCEPTIONS.get(
|
||||
qname, objectmodel.ExceptionInstanceModel
|
||||
)
|
||||
return instance()(self)
|
||||
|
||||
|
||||
class DictInstance(bases.Instance):
|
||||
"""Special kind of instances for dictionaries
|
||||
|
||||
This instance knows the underlying object model of the dictionaries, which means
|
||||
that methods such as .values or .items can be properly inferred.
|
||||
"""
|
||||
|
||||
# pylint: disable=unnecessary-lambda
|
||||
special_attributes = util.lazy_descriptor(lambda: objectmodel.DictModel())
|
||||
|
||||
|
||||
# Custom objects tailored for dictionaries, which are used to
|
||||
# disambiguate between the types of Python 2 dict's method returns
|
||||
# and Python 3 (where they return set like objects).
|
||||
class DictItems(bases.Proxy):
|
||||
__str__ = node_classes.NodeNG.__str__
|
||||
__repr__ = node_classes.NodeNG.__repr__
|
||||
|
||||
|
||||
class DictKeys(bases.Proxy):
|
||||
__str__ = node_classes.NodeNG.__str__
|
||||
__repr__ = node_classes.NodeNG.__repr__
|
||||
|
||||
|
||||
class DictValues(bases.Proxy):
|
||||
__str__ = node_classes.NodeNG.__str__
|
||||
__repr__ = node_classes.NodeNG.__repr__
|
||||
|
||||
|
||||
class PartialFunction(scoped_nodes.FunctionDef):
|
||||
"""A class representing partial function obtained via functools.partial"""
|
||||
|
||||
def __init__(
|
||||
self, call, name=None, doc=None, lineno=None, col_offset=None, parent=None
|
||||
):
|
||||
super().__init__(name, doc, lineno, col_offset, parent=None)
|
||||
# A typical FunctionDef automatically adds its name to the parent scope,
|
||||
# but a partial should not, so defer setting parent until after init
|
||||
self.parent = parent
|
||||
self.filled_args = call.positional_arguments[1:]
|
||||
self.filled_keywords = call.keyword_arguments
|
||||
|
||||
wrapped_function = call.positional_arguments[0]
|
||||
inferred_wrapped_function = next(wrapped_function.infer())
|
||||
if isinstance(inferred_wrapped_function, PartialFunction):
|
||||
self.filled_args = inferred_wrapped_function.filled_args + self.filled_args
|
||||
self.filled_keywords = {
|
||||
**inferred_wrapped_function.filled_keywords,
|
||||
**self.filled_keywords,
|
||||
}
|
||||
|
||||
self.filled_positionals = len(self.filled_args)
|
||||
|
||||
def infer_call_result(self, caller=None, context=None):
|
||||
if context:
|
||||
current_passed_keywords = {
|
||||
keyword for (keyword, _) in context.callcontext.keywords
|
||||
}
|
||||
for keyword, value in self.filled_keywords.items():
|
||||
if keyword not in current_passed_keywords:
|
||||
context.callcontext.keywords.append((keyword, value))
|
||||
|
||||
call_context_args = context.callcontext.args or []
|
||||
context.callcontext.args = self.filled_args + call_context_args
|
||||
|
||||
return super().infer_call_result(caller=caller, context=context)
|
||||
|
||||
def qname(self):
|
||||
return self.__class__.__name__
|
||||
|
||||
|
||||
# TODO: Hack to solve the circular import problem between node_classes and objects
|
||||
# This is not needed in 2.0, which has a cleaner design overall
|
||||
node_classes.Dict.__bases__ = (node_classes.NodeNG, DictInstance)
|
||||
|
||||
|
||||
class Property(scoped_nodes.FunctionDef):
|
||||
"""Class representing a Python property"""
|
||||
|
||||
def __init__(
|
||||
self, function, name=None, doc=None, lineno=None, col_offset=None, parent=None
|
||||
):
|
||||
self.function = function
|
||||
super().__init__(name, doc, lineno, col_offset, parent)
|
||||
|
||||
# pylint: disable=unnecessary-lambda
|
||||
special_attributes = util.lazy_descriptor(lambda: objectmodel.PropertyModel())
|
||||
type = "property"
|
||||
|
||||
def pytype(self):
|
||||
return "builtins.property"
|
||||
|
||||
def infer_call_result(self, caller=None, context=None):
|
||||
raise InferenceError("Properties are not callable")
|
||||
|
||||
def infer(self, context=None, **kwargs):
|
||||
return iter((self,))
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue