Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Apply various ruff groups #39

Draft
wants to merge 10 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 6 additions & 5 deletions lib/ts_utils/metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,8 +23,8 @@

__all__ = [
"NoSuchStubError",
"StubMetadata",
"PackageDependencies",
"StubMetadata",
"StubtestSettings",
"get_recursive_requirements",
"read_dependencies",
Expand Down Expand Up @@ -184,7 +184,7 @@


class NoSuchStubError(ValueError):
"""Raise NoSuchStubError to indicate that a stubs/{distribution} directory doesn't exist"""
"""Raise NoSuchStubError to indicate that a stubs/{distribution} directory doesn't exist."""


@cache
Expand Down Expand Up @@ -250,7 +250,7 @@
f"Invalid upstream_repository for {distribution!r}: "
"URLs for GitHub repositories always have two parts in their paths"
)
assert num_url_path_parts == 2, bad_github_url_msg
assert num_url_path_parts == 2, bad_github_url_msg # noqa: PLR2004 # astral-sh/ruff#10009

obsolete_since: object = data.get("obsolete_since")
assert isinstance(obsolete_since, (str, type(None)))
Expand Down Expand Up @@ -280,7 +280,7 @@
assert isinstance(tools_settings, dict)
assert tools_settings.keys() <= _KNOWN_METADATA_TOOL_FIELDS.keys(), f"Unrecognised tool for {distribution!r}"
for tool, tk in _KNOWN_METADATA_TOOL_FIELDS.items():
settings_for_tool: object = tools_settings.get(tool, {})

Check failure on line 283 in lib/ts_utils/metadata.py

View workflow job for this annotation

GitHub Actions / Run pyright against the scripts and tests directories (Linux)

Type of "get" is partially unknown   Type of "get" is "Overload[(key: Unknown, /) -> (Unknown | None), (key: Unknown, default: Unknown, /) -> Unknown, (key: Unknown, default: _T@get, /) -> (Unknown | _T@get)]" (reportUnknownMemberType)

Check failure on line 283 in lib/ts_utils/metadata.py

View workflow job for this annotation

GitHub Actions / Run pyright against the scripts and tests directories (Windows)

Type of "get" is partially unknown   Type of "get" is "Overload[(key: Unknown, /) -> (Unknown | None), (key: Unknown, default: Unknown, /) -> Unknown, (key: Unknown, default: _T@get, /) -> (Unknown | _T@get)]" (reportUnknownMemberType)
assert isinstance(settings_for_tool, dict)
for key in settings_for_tool:
assert key in tk, f"Unrecognised {tool} key {key!r} for {distribution!r}"
Expand All @@ -304,16 +304,17 @@
def update_metadata(distribution: str, **new_values: object) -> tomlkit.TOMLDocument:
"""Updates a distribution's METADATA.toml.

Return the updated TOML dictionary for use without having to open the file separately."""
Return the updated TOML dictionary for use without having to open the file separately.
"""
path = metadata_path(distribution)
try:
with path.open("rb") as file:
data = tomlkit.load(file)
except FileNotFoundError:
raise NoSuchStubError(f"Typeshed has no stubs for {distribution!r}!") from None
data.update(new_values)

Check failure on line 315 in lib/ts_utils/metadata.py

View workflow job for this annotation

GitHub Actions / Run pyright against the scripts and tests directories (Linux)

Type of "update" is partially unknown   Type of "update" is "Overload[(m: SupportsKeysAndGetItem[Unknown, Unknown], /, **kwargs: Unknown) -> None, (m: Iterable[tuple[Unknown, Unknown]], /, **kwargs: Unknown) -> None, (**kwargs: Unknown) -> None]" (reportUnknownMemberType)

Check failure on line 315 in lib/ts_utils/metadata.py

View workflow job for this annotation

GitHub Actions / Run pyright against the scripts and tests directories (Windows)

Type of "update" is partially unknown   Type of "update" is "Overload[(m: SupportsKeysAndGetItem[Unknown, Unknown], /, **kwargs: Unknown) -> None, (m: Iterable[tuple[Unknown, Unknown]], /, **kwargs: Unknown) -> None, (**kwargs: Unknown) -> None]" (reportUnknownMemberType)
with path.open("w", encoding="UTF-8") as file:
tomlkit.dump(data, file)

Check failure on line 317 in lib/ts_utils/metadata.py

View workflow job for this annotation

GitHub Actions / Run pyright against the scripts and tests directories (Linux)

Type of "dump" is partially unknown   Type of "dump" is "(data: Mapping[Unknown, Unknown], fp: IO[str], *, sort_keys: bool = False) -> None" (reportUnknownMemberType)

Check failure on line 317 in lib/ts_utils/metadata.py

View workflow job for this annotation

GitHub Actions / Run pyright against the scripts and tests directories (Windows)

Type of "dump" is partially unknown   Type of "dump" is "(data: Mapping[Unknown, Unknown], fp: IO[str], *, sort_keys: bool = False) -> None" (reportUnknownMemberType)
return data


Expand All @@ -329,7 +330,7 @@

@cache
def get_pypi_name_to_typeshed_name_mapping() -> Mapping[str, str]:
return {read_metadata(dir.name).stub_distribution: dir.name for dir in STUBS_PATH.iterdir()}
return {read_metadata(directory.name).stub_distribution: directory.name for directory in STUBS_PATH.iterdir()}


@cache
Expand Down
6 changes: 2 additions & 4 deletions lib/ts_utils/paths.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,8 +24,7 @@ def distribution_path(distribution_name: str) -> Path:
def tests_path(distribution_name: str) -> Path:
if distribution_name == "stdlib":
return STDLIB_PATH / TESTS_DIR
else:
return STUBS_PATH / distribution_name / TESTS_DIR
return STUBS_PATH / distribution_name / TESTS_DIR


def test_cases_path(distribution_name: str) -> Path:
Expand All @@ -35,5 +34,4 @@ def test_cases_path(distribution_name: str) -> Path:
def allowlists_path(distribution_name: str) -> Path:
if distribution_name == "stdlib":
return tests_path("stdlib") / "stubtest_allowlists"
else:
return tests_path(distribution_name)
return tests_path(distribution_name)
10 changes: 4 additions & 6 deletions lib/ts_utils/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
from packaging.requirements import Requirement

try:
from termcolor import colored as colored
from termcolor import colored as colored # pyright: ignore[reportAssignmentType] # noqa: PLC0414
except ImportError:

def colored(text: str, color: str | None = None, **kwargs: Any) -> str: # type: ignore[misc]
Expand Down Expand Up @@ -92,7 +92,6 @@ def venv_python(venv_dir: Path) -> Path:
@cache
def parse_requirements() -> Mapping[str, Requirement]:
"""Return a dictionary of requirements from the requirements file."""

with REQUIREMENTS_PATH.open(encoding="UTF-8") as requirements_file:
stripped_lines = map(strip_comments, requirements_file)
stripped_more = [li for li in stripped_lines if not li.startswith("-")]
Expand Down Expand Up @@ -120,8 +119,8 @@ def parse_stdlib_versions_file() -> SupportedVersionsDict:
result: dict[str, tuple[VersionTuple, VersionTuple]] = {}
with VERSIONS_PATH.open(encoding="UTF-8") as f:
for line in f:
line = strip_comments(line)
if line == "":
line = strip_comments(line) # noqa: PLW2901
if not line:
continue
m = VERSION_LINE_RE.match(line)
assert m, f"invalid VERSIONS line: {line}"
Expand Down Expand Up @@ -194,8 +193,7 @@ def allowlists(distribution_name: str) -> list[str]:

if distribution_name == "stdlib":
return ["common.txt", platform_allowlist, version_allowlist, combined_allowlist, local_version_allowlist]
else:
return ["stubtest_allowlist.txt", platform_allowlist]
return ["stubtest_allowlist.txt", platform_allowlist]


# ====================================================================
Expand Down
108 changes: 97 additions & 11 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,8 @@ force-exclude = ".*_pb2.pyi"
line-length = 130
# Oldest supported Python version
target-version = "py38"
fix = true
preview = true # Help catch typing-related lint issues early
# fix = true
exclude = [
# virtual environment
".env",
Expand All @@ -37,11 +38,40 @@ exclude = ["**/test_cases/**/*.py"]
# tell ruff not to flag these as e.g. "unused noqa comments"
external = ["F821", "NQA", "Y"]
select = [
# "PTH", # TODO !
# "TD", # TODO !
"A", # flake8-builtins
"ASYNC", # flake8-async
"B", # flake8-bugbear
"BLE", # flake8-blind-except
"C4", # flake8-comprehensions
"D", # pydocstyle
"DOC", # pydoclint
"DTZ", # flake8-datetimez
"EXE", # flake8-executable
"FA", # flake8-future-annotations
"FBT", # flake8-boolean-trap
"FLY", # flynt
"FURB", # refurb
"G", # flake8-logging-format
"I", # isort
"ISC", # flake8-implicit-str-concat
"LOG", # flake8-logging
"N", # pep8-naming
"NPY", # NumPy-specific rules
"PERF", # Perflint
"PGH", # pygrep-hooks
"PIE", # flake8-pie
"PL", # Pylint
"RET", # flake8-return
"RSE", # flake8-raise
"RUF", # Ruff-specific and unused-noqa
"S", # flake8-bandit
"SIM", # flake8-simplify
"SLOT", # flake8-slots
"TRY", # tryceratops
"UP", # pyupgrade
"YTT", # flake8-2020
# Flake8 base rules
"E", # pycodestyle Error
"F", # Pyflakes
Expand Down Expand Up @@ -71,10 +101,11 @@ extend-safe-fixes = [
"UP036", # Remove unnecessary `sys.version_info` blocks
]
ignore = [
# TODO: Ruff 0.8.0 added sorting of __all__ and __slots__. Validate whether we want this in stubs
"RUF022",
"RUF023",

# TODO
"ASYNC221", # I don't know how to improve subprocess.check_call calls to satisfy this
"FURB101", # TODO with PTH
"FURB103", # TODO with PTH
"RUF036", # None not at the end of the type annotation. # Request for autofix: astral-sh/ruff#15136
###
# Rules that can conflict with the formatter (Black)
# https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
Expand All @@ -86,31 +117,86 @@ ignore = [
###
# Rules we don't want or don't agree with
###
# Slower and more verbose https://github.com/astral-sh/ruff/issues/7871
"UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)`
# We're not a library, no need to document everything
"D1", # Missing docstring in ...
# We want D211: No blank lines allowed before class docstring
"D203", # 1 blank line required before class docstring
# Doesn't support split "summary line"
"D205", # 1 blank line required between summary line and description
# We want D212: Multi-line docstring summary should start at the first line
"D213", # Multi-line docstring summary should start at the second line
"D401", # First line of docstring should be in imperative mood
# Return/yield type is enough documentation for us
"DOC201", # return is not documented in docstring
"DOC402", # yield is not documented in docstring
# We're not a public library; users are contributors who already read the code directly, so clear error messages are sufficient
"DOC501", # Raised exception missing from docstring
# Used for direct, non-subclass type comparison, for example: `type(val) is str`
# see https://github.com/astral-sh/ruff/issues/6465
"E721", # Do not compare types, use `isinstance()`
# Prefer explicit, but allow implicit multiline
# (hence lint.flake8-implicit-str-concat.allow-multiline isn't set to false)
"ISC003", # Explicitly concatenated string should be implicitly concatenated
# Python 3.11 introduced "zero cost" exception handling, our tests & scripts run on modern Python versions
"PERF203", # try-except within a loop incurs performance overhead
"PLR09", # Too many ...
# Typeshed tests and scripts are never run in optimized mode
"S101", # Use of assert detected
# We use subprocess a lot in scripts and tests
"S404", # subprocess module is possibly insecure
# Prone to false positives astral-sh/ruff#4045
"S603", # subprocess call: check for execution of untrusted input
# Full paths would make cross-environment compatibility a nightmare
"S607", # Starting a process with a partial executable path
"TRY003", # Avoid specifying long messages outside the exception class
# Slower and more verbose https://github.com/astral-sh/ruff/issues/7871
"UP038", # Use `X | Y` in `isinstance` call instead of `(X, Y)`
###
# False-positives, but already checked by type-checkers
###
# Configuring namespace-packages = ["scripts/sync_protobuf"] doesn't work ?
"PLC2701", # Private name import {name} from external module {module}
# Ruff doesn't support multi-file analysis yet: https://github.com/astral-sh/ruff/issues/5295
"RUF013", # PEP 484 prohibits implicit `Optional`
]

[tool.ruff.lint.per-file-ignores]
"*.pyi" = [
# Most flake8-bugbear rules don't apply for third-party stubs like typeshed.
# B033 could be slightly useful but Ruff doesn't have per-file select
"B", # flake8-bugbear
# TODO: Ruff 0.8.0 added sorting of __all__ and __slots__. Validate whether we want this in stubs
"RUF022",
"RUF023",
# Most pep8-naming rules don't apply for third-party stubs like typeshed.
"N80", # pep8-naming
"N815", # pep8-naming
"N816", # pep8-naming
"N818", # pep8-naming
# Rules that are out of the control of stub authors:
"A001", # builtin-variable-shadowing
"A002", # builtin-argument-shadowing
"A004", # builtin-import-shadowing
"F403", # `from . import *` used; unable to detect undefined names
"FURB189", # Subclassing {subclass} can be error prone, use collections.{replacement} instead
"PIE796", # Enum contains duplicate value # astral-sh/ruff#15132
"PLC2701", # Private name import from external module
"PLW1641", # Object does not implement __hash__ method
"S105", # Possible hardcoded password assigned
"S106", # Possible hardcoded password assigned to argument
"S107", # Possible hardcoded password assigned to function default
"S4", # Insecure and vulnerable libraries
# Often breaks mypy/stubtest
"FURB180", # Use of metaclass=abc.ABCMeta to define abstract base class
# Stubs can sometimes re-export entire modules.
# Issues with using a star-imported name will be caught by type-checkers.
"F405", # may be undefined, or defined from star imports
]
# See comment on black's force-exclude config above
"*_pb2.pyi" = [
# Non-autofixable docstring lints on autogenerated modules
"D210", # No whitespaces allowed surrounding docstring text
# These modify the docstring content
"D301", # Use r""" if any backslashes in a docstring
"D40",
"D415", # First line should end with a period, question mark, or exclamation point
# See comment on black's force-exclude config above
"E501", # Line too long
]

Expand Down
27 changes: 13 additions & 14 deletions scripts/create_baseline_stubs.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
import subprocess
import sys
import urllib.parse
from http import HTTPStatus
from importlib.metadata import distribution

import aiohttp
Expand Down Expand Up @@ -45,34 +46,33 @@ def get_installed_package_info(project: str) -> tuple[str, str] | None:

Return (normalized project name, installed version) if successful.
"""
r = subprocess.run(["pip", "freeze"], capture_output=True, text=True, check=True)
return search_pip_freeze_output(project, r.stdout)
return search_pip_freeze_output(project, subprocess.check_output(["pip", "freeze"], text=True))


def run_stubgen(package: str, output: str) -> None:
print(f"Running stubgen: stubgen -o {output} -p {package}")
subprocess.run(["stubgen", "-o", output, "-p", package, "--export-less"], check=True)
subprocess.check_call(["stubgen", "-o", output, "-p", package, "--export-less"])


def run_stubdefaulter(stub_dir: str) -> None:
print(f"Running stubdefaulter: stubdefaulter --packages {stub_dir}")
subprocess.run(["stubdefaulter", "--packages", stub_dir])
subprocess.run(["stubdefaulter", "--packages", stub_dir], check=False)


def run_black(stub_dir: str) -> None:
print(f"Running Black: black {stub_dir}")
subprocess.run(["pre-commit", "run", "black", "--files", *glob.iglob(f"{stub_dir}/**/*.pyi")])
subprocess.run(["pre-commit", "run", "black", "--files", *glob.iglob(f"{stub_dir}/**/*.pyi")], check=False)


def run_ruff(stub_dir: str) -> None:
print(f"Running Ruff: ruff check {stub_dir} --fix-only")
subprocess.run([sys.executable, "-m", "ruff", "check", stub_dir, "--fix-only"])
subprocess.run([sys.executable, "-m", "ruff", "check", stub_dir, "--fix-only"], check=False)


async def get_project_urls_from_pypi(project: str, session: aiohttp.ClientSession) -> dict[str, str]:
pypi_root = f"https://pypi.org/pypi/{urllib.parse.quote(project)}"
async with session.get(f"{pypi_root}/json") as response:
if response.status != 200:
if response.status != HTTPStatus.OK:
return {}
j: dict[str, dict[str, dict[str, str]]]
j = await response.json()
Expand All @@ -90,24 +90,23 @@ async def get_upstream_repo_url(project: str) -> str | None:

# Order the project URLs so that we put the ones
# that are most likely to point to the source code first
urls_to_check: list[str] = []
url_names_probably_pointing_to_source = ("Source", "Repository", "Homepage")
for url_name in url_names_probably_pointing_to_source:
if url := project_urls.get(url_name):
urls_to_check.append(url)
urls_to_check: list[str] = [
url for url in (project_urls.get(url_name) for url_name in url_names_probably_pointing_to_source) if url
]
urls_to_check.extend(
url for url_name, url in project_urls.items() if url_name not in url_names_probably_pointing_to_source
)

for url in urls_to_check:
# Remove `www.`; replace `http://` with `https://`
url = re.sub(r"^(https?://)?(www\.)?", "https://", url)
url = re.sub(r"^(https?://)?(www\.)?", "https://", url) # noqa: PLW2901
netloc = urllib.parse.urlparse(url).netloc
if netloc in {"gitlab.com", "github.com", "bitbucket.org", "foss.heptapod.net"}:
# truncate to https://site.com/user/repo
upstream_repo_url = "/".join(url.split("/")[:5])
async with session.get(upstream_repo_url) as response:
if response.status == 200:
if response.status == HTTPStatus.OK:
return upstream_repo_url
return None

Expand Down Expand Up @@ -218,7 +217,7 @@ def main() -> None:
info = get_installed_package_info(project)
if info is None:
print(f'Error: "{project}" is not installed', file=sys.stderr)
print("", file=sys.stderr)
print(file=sys.stderr)
print(f'Suggestion: Run "python3 -m pip install {project}" and try again', file=sys.stderr)
sys.exit(1)
project, version = info
Expand Down
Loading
Loading