Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add ruff.toml with Python lint configuration #2587

Merged
merged 3 commits into from
Aug 22, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .git-blame-ignore-revs
Original file line number Diff line number Diff line change
Expand Up @@ -9,3 +9,6 @@ d6d0607a845e6f71084ce272a1c1e8c50e244bdd

# Apply buildifier to the project
f457f19039b82536b35659c1f9cb898a198e6cd1

# Apply ruff linter to the project
893774eab71fd7be5000436ff2ff0b5dd85ef073
25 changes: 25 additions & 0 deletions .ruff.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,25 @@
target-version = "py312"

[lint]
select = [
"B0", # bugbear (all B0* checks enabled by default)
"B904", # bugbear (Within an except clause, raise exceptions with raise ... from err)
"B905", # bugbear (zip() without an explicit strict= parameter set.)
"E", # pycodestyle
"W", # pycodestyle
"F", # pyflakes
"I", # isort
"PGH", # pygrep-hooks
"PLC", # pylint conventions
"PLE", # pylint errors
"UP", # pyupgrade
]
ignore = [
"E402", # module import not at top of file
"UP038", # Use X | Y in isinstance check instead of (X, Y)
]

[lint.per-file-ignores]
# We want to preserve compatibility with old Python versions in the tools
# directory, so disable pyupgrade there. The oldest supported version is currently 3.9.
"tools/*" = ["UP"]
15 changes: 5 additions & 10 deletions samples/pyodide-fastapi/worker.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,31 +7,26 @@ async def on_fetch(request):
return await asgi.fetch(app, request, env)


def test():
import fastapi


# Set up fastapi app

from fastapi import FastAPI
from pydantic import BaseModel


app = FastAPI()


@app.get("/hello")
async def root(env=env):
async def hello(env=env):
return {"message": "Hello World", "secret": env.secret}


@app.get("/route")
async def root():
async def route():
return {"message": "this is my custom route"}


@app.get("/favicon.ico")
async def root():
async def favicon():
return {"message": "here's a favicon I guess?"}


Expand All @@ -53,8 +48,8 @@ async def create_item(item: Item):


@app.put("/items/{item_id}")
async def create_item(item_id: int, item: Item, q: str | None = None):
result = {"item_id": item_id, **item.dict()}
async def create_item2(item_id: int, item: Item, q: str | None = None):
result = {"item_id": item_id, **item.model_dump()}
if q:
result.update({"q": q})
return result
1 change: 0 additions & 1 deletion samples/pyodide-langchain/worker.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
from js import Response
from langchain_core.prompts import PromptTemplate
from langchain_openai import OpenAI

Expand Down
9 changes: 3 additions & 6 deletions samples/repl-server-python/worker.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,8 @@
from js import Response

import io

import code

from io import StringIO
import sys
from io import StringIO

from js import Response

sys.stdout = StringIO()

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -2,11 +2,9 @@
# Licensed under the Apache 2.0 license found in the LICENSE file or at:
# https://opensource.org/licenses/Apache-2.0

from js import Float32Array
from js import JSON
from js import Float32Array, Object

from pyodide.ffi import to_js as _to_js
from js import Object


def to_js(obj):
Expand Down
10 changes: 6 additions & 4 deletions src/pyodide/internal/asgi.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
from asyncio import Future, ensure_future, Queue, sleep
from inspect import isawaitable
from asyncio import Future, Queue, ensure_future, sleep
from contextlib import contextmanager
from fastapi import Request, Depends
from inspect import isawaitable

from fastapi import Depends, Request

ASGI = {"spec_version": "2.0", "version": "3.0"}

Expand Down Expand Up @@ -93,7 +94,8 @@ async def send(got):


async def process_request(app, req, env):
from js import Response, Object
from js import Object, Response

from pyodide.ffi import create_proxy

status = None
Expand Down
39 changes: 23 additions & 16 deletions src/pyodide/internal/patches/aiohttp.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,20 @@
# Monkeypatch aiohttp to introduce Fetch API support.
#
# Based on https://github.com/pyodide/pyodide/issues/3711#issuecomment-1773523301
# with some modifications.
"""
Monkeypatch aiohttp to introduce Fetch API support.

from multidict import CIMultiDict, istr
from aiohttp import payload, InvalidURL, hdrs, ClientSession, ClientTimeout
from aiohttp.client_reqrep import _merge_ssl_params
from aiohttp.helpers import TimeoutHandle, strip_auth_from_url, get_env_proxy_for_url
Based on https://github.com/pyodide/pyodide/issues/3711#issuecomment-1773523301
with some modifications.
"""

# ruff: noqa

from collections.abc import Iterable
from contextlib import suppress
from typing import Any, Optional, Iterable
from typing import Any

from aiohttp import ClientSession, ClientTimeout, InvalidURL, hdrs, payload
from aiohttp.client_reqrep import _merge_ssl_params
from aiohttp.helpers import TimeoutHandle, get_env_proxy_for_url, strip_auth_from_url
from multidict import CIMultiDict, istr
from yarl import URL


Expand Down Expand Up @@ -43,25 +49,25 @@ async def _request(
json: Any = None,
cookies=None,
headers=None,
skip_auto_headers: Optional[Iterable[str]] = None,
skip_auto_headers: Iterable[str] | None = None,
auth=None,
allow_redirects: bool = True,
max_redirects: int = 10,
compress: Optional[str] = None,
chunked: Optional[bool] = None,
compress: str | None = None,
chunked: bool | None = None,
expect100: bool = False,
raise_for_status=None,
read_until_eof: bool = True,
proxy=None,
proxy_auth=None,
timeout=None,
verify_ssl: Optional[bool] = None,
fingerprint: Optional[bytes] = None,
verify_ssl: bool | None = None,
fingerprint: bytes | None = None,
ssl_context=None,
ssl=None,
proxy_headers=None,
trace_request_ctx=None,
read_bufsize: Optional[int] = None,
read_bufsize: int | None = None,
):
# NOTE: timeout clamps existing connect and read timeouts. We cannot
# set the default to None because we need to detect if the user wants
Expand Down Expand Up @@ -191,7 +197,8 @@ async def _request(
loop=req.loop,
session=req._session,
)
from js import fetch, Headers
from js import Headers, fetch

from pyodide.ffi import to_js

body = None
Expand Down
11 changes: 6 additions & 5 deletions src/pyodide/internal/patches/httpx.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,9 @@
from httpx._transports.default import AsyncResponseStream
from httpx._types import AsyncByteStream
from httpx._utils import Timer

from js import Headers as js_Headers
from js import fetch

from pyodide.ffi import create_proxy


Expand All @@ -29,8 +29,9 @@ def acquire_buffer(content):


async def js_readable_stream_iter(js_readable_stream):
"""Readable streams are supposed to be async iterators some day but they aren't yet.
In the meantime, this is an adaptor that produces an async iterator from a readable stream.
"""Readable streams are supposed to be async iterators some day but they
aren't yet. In the meantime, this is an adaptor that produces an async
iterator from a readable stream.
"""
reader = js_readable_stream.getReader()
while True:
Expand Down Expand Up @@ -64,8 +65,8 @@ async def _send_single_request(self, request: Request) -> Response:
)

py_headers = Headers(js_resp.headers)
# Unset content-encoding b/c Javascript fetch already handled unpacking. If we leave it we will
# get errors when httpx tries to unpack a second time.
# Unset content-encoding b/c JavaScript fetch already handled unpacking. If
# we leave it, we will get errors when httpx tries to unpack a second time.
py_headers.pop("content-encoding", None)
response = Response(
status_code=js_resp.status,
Expand Down
22 changes: 13 additions & 9 deletions src/pyodide/internal/process_script_imports.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,17 @@
# This script is used to prepare a worker prior to a _package_ memory snapshot being taken.
# All it does is walk through the imports in each of the worker's modules and attempts to import
# them. Local imports are not possible because the worker file path is explicitly removed from the
# module search path.
"""
This script is used to prepare a worker prior to a _package_ memory snapshot
being taken. All it does is walk through the imports in each of the worker's
modules and attempts to import them. Local imports are not possible because
the worker file path is explicitly removed from the module search path.
"""

CF_LOADED_MODULES = []


def _do_it():
import ast
from pathlib import Path
import sys
from pathlib import Path

def find_imports(source: str) -> list[str]:
try:
Expand Down Expand Up @@ -36,10 +39,11 @@ def process_script(script):
pass

def process_scripts():
# Currently this script assumes that it is generating a _package_ snapshot- one that
# only includes non-vendored packages. Because of this we do not wish to import local
# modules, the easiest way to ensure they cannot be imported is to remove
# `/session/metadata` from the sys path.
# Currently this script assumes that it is generating a _package_
# snapshot- one that only includes non-vendored packages. Because of
# this we do not wish to import local modules, the easiest way to ensure
# they cannot be imported is to remove `/session/metadata` from the sys
# path.
worker_files_path = "/session/metadata"
sys.path.remove(worker_files_path)
for script in Path(worker_files_path).glob("**/*.py"):
Expand Down
29 changes: 16 additions & 13 deletions src/pyodide/internal/topLevelEntropy/entropy_import_context.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,11 +13,11 @@
Other rust packages are likely to need similar treatment to pydantic_core.
"""

from contextlib import contextmanager
import sys
from array import array
from .import_patch_manager import block_calls
from contextlib import contextmanager

import sys
from .import_patch_manager import block_calls

RUST_PACKAGES = ["pydantic_core", "tiktoken"]
MODULES_TO_PATCH = [
Expand All @@ -34,8 +34,9 @@


def get_bad_entropy_flag():
# simpleRunPython reads out stderr. We put the address there so we can fish it out...
# We could use ctypes instead of array but ctypes weighs an extra 100kb compared to array.
# simpleRunPython reads out stderr. We put the address there so we can fish
# it out... We could use ctypes instead of array but ctypes weighs an extra
# 100kb compared to array.
print(ALLOWED_ENTROPY_CALLS.buffer_info()[0], file=sys.stderr)


Expand Down Expand Up @@ -72,7 +73,8 @@ def get_entropy_import_context(name):
if res:
return res
if name in RUST_PACKAGES:
# Initial import needs one entropy call to initialize std::collections::HashMap hash seed
# Initial import needs one entropy call to initialize
# std::collections::HashMap hash seed
return rust_package_context
raise Exception(f"Missing context for {name}")

Expand All @@ -95,7 +97,7 @@ def random_context(module):
# instantiating it without a seed will call getentropy() and fail.
# Instantiating SystemRandom is fine, calling it's methods will call
# getentropy() and fail.
block_calls(module, allowlist=["Random", "SystemRandom"])
block_calls(module, allowlist=("Random", "SystemRandom"))


@contextmanager
Expand All @@ -109,7 +111,7 @@ def numpy_random_context(module):
yield
# Calling default_rng() with a given seed is fine, calling it without a seed
# will call getentropy() and fail.
block_calls(module, allowlist=["default_rng"])
block_calls(module, allowlist=("default_rng",))


@contextmanager
Expand All @@ -125,15 +127,16 @@ def numpy_random_mtrand_context(module):
@contextmanager
def pydantic_core_context(module):
try:
# Initial import needs one entropy call to initialize std::collections::HashMap hash seed
# Initial import needs one entropy call to initialize
# std::collections::HashMap hash seed
with allow_bad_entropy_calls(1):
yield
finally:
try:
with allow_bad_entropy_calls(1):
# validate_core_schema makes an ahash::AHashMap which makes another entropy call for
# its hash seed. It will throw an error but only after making the needed entropy
# call.
# validate_core_schema makes an ahash::AHashMap which makes
# another entropy call for its hash seed. It will throw an error
# but only after making the needed entropy call.
module.validate_core_schema(None)
except module.SchemaError:
pass
Expand All @@ -152,7 +155,7 @@ def patched_Random():
try:
yield
finally:
random.Random = random
random.Random = Random


class DeterministicRandomNameSequence:
Expand Down
Loading
Loading