diff --git a/.github/workflows/check-release.yml b/.github/workflows/check-release.yml
index c3bd33ff..4c111740 100644
--- a/.github/workflows/check-release.yml
+++ b/.github/workflows/check-release.yml
@@ -44,7 +44,7 @@ jobs:
       - name: Install Dependencies
         run: |
           pip install -e . --no-deps
-          pip install -e plugins/auth_base
+          pip install -e jupyverse_api
           pip install -e plugins/frontend
           pip install -e plugins/jupyterlab
           pip install -e plugins/retrolab
@@ -54,6 +54,8 @@
           pip install -e plugins/nbconvert
           pip install -e plugins/yjs
           pip install -e plugins/auth
+          pip install -e plugins/noauth
+          pip install -e plugins/auth_fief
           pip install -e plugins/login
       - name: Check Release
         if: ${{ matrix.group == 'check_release' }}
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index cdddfd65..1d5d9498 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -34,15 +34,15 @@ jobs:
       - name: Upgrade pip
         run: python3 -m pip install --upgrade pip

-      - name: Create jupyterlab-auth dev environment
+      - name: Create jupyterlab dev environment
        run: |
          pip install hatch
-         hatch env create dev.jupyterlab-auth
+         hatch env create dev.jupyterlab

      - name: Check types
        run: |
-         hatch run dev.jupyterlab-auth:typecheck
+         hatch run dev.jupyterlab:typecheck

      - name: Run tests
        run: |
-         hatch run dev.jupyterlab-auth:test
+         hatch run dev.jupyterlab:test
diff --git a/config.yaml b/config.yaml
new file mode 100644
index 00000000..7a53db6b
--- /dev/null
+++ b/config.yaml
@@ -0,0 +1,52 @@
+---
+component:
+  type: jupyverse
+  components:
+    app:
+      type: app
+    auth:
+      type: auth
+    #auth:
+    #  type: auth_fief
+    #auth:
+    #  type: noauth
+    contents:
+      type: contents
+    frontend:
+      type: frontend
+    lab:
+      type: lab
+    jupyterlab:
+      type: jupyterlab
+    kernels:
+      type: kernels
+    login:
+      type: login
+    nbconvert:
+      type: nbconvert
+    resource_usage:
+      type: resource_usage
+      track_cpu_percent: true
+    #retrolab:
+    #  type: retrolab
+    terminals:
+      type: terminals
+    yjs:
+      type: yjs
+
+logging:
+  version: 1
+  disable_existing_loggers: false
+  formatters:
+    default:
+      format: '[%(asctime)s %(levelname)s] %(message)s'
+  handlers:
+    console:
+      class: logging.StreamHandler
+      formatter: default
+  root:
+    handlers: [console]
+    level: INFO
+  loggers:
+    webnotifier:
+      level: DEBUG
diff --git a/plugins/auth_base/fps_auth_base/py.typed b/jupyverse/py.typed
similarity index 100%
rename from plugins/auth_base/fps_auth_base/py.typed
rename to jupyverse/py.typed
diff --git a/plugins/auth_base/COPYING.md b/jupyverse_api/COPYING.md
similarity index 100%
rename from plugins/auth_base/COPYING.md
rename to jupyverse_api/COPYING.md
diff --git a/jupyverse_api/README.md b/jupyverse_api/README.md
new file mode 100644
index 00000000..99cbbb09
--- /dev/null
+++ b/jupyverse_api/README.md
@@ -0,0 +1,3 @@
+# Jupyverse API
+
+The public API for Jupyverse.
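The config.yaml added above assembles Jupyverse from asphalt components: each entry under `components` names a component type (resolved through the `asphalt.components` entry points that the packages declare in their pyproject.toml), and any additional keys under an entry are handed to that component's constructor, as `track_cpu_percent: true` is for `resource_usage`. Below is a minimal sketch of such a component, assuming asphalt's standard `Component` API; `MyPluginComponent`, `my_plugin`, and `some_option` are hypothetical and not part of this changeset.

```python
from asphalt.core import Component, Context


class MyPluginComponent(Component):
    """Hypothetical plugin component, showing how a config.yaml entry such as

        my_plugin:
          type: my_plugin      # resolved via the "asphalt.components" entry points
          some_option: true    # extra keys become constructor keyword arguments

    would reach the component at startup."""

    def __init__(self, some_option: bool = False, **kwargs):
        # keys under the component's entry in config.yaml arrive here
        self.some_option = some_option

    async def start(self, ctx: Context) -> None:
        # request resources provided by other components, or add this plugin's own
        ...
```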
diff --git a/jupyverse_api/jupyverse_api/__init__.py b/jupyverse_api/jupyverse_api/__init__.py
new file mode 100644
index 00000000..ae81f041
--- /dev/null
+++ b/jupyverse_api/jupyverse_api/__init__.py
@@ -0,0 +1,42 @@
+from typing import Dict
+
+from pydantic import BaseModel, Extra
+
+from .app import App
+
+
+__version__ = "0.0.50"
+
+
+class Singleton(type):
+    _instances: Dict = {}
+
+    def __call__(cls, *args, **kwargs):
+        if cls not in cls._instances:
+            cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs)
+        return cls._instances[cls]
+
+
+class Config(BaseModel):
+    class Config:
+        extra = Extra.forbid
+
+
+class Router:
+    _app: App
+
+    def __init__(
+        self,
+        app: App,
+    ) -> None:
+        self._app = app
+
+    @property
+    def _type(self):
+        return self.__class__.__name__
+
+    def include_router(self, router, **kwargs):
+        self._app._include_router(router, self._type, **kwargs)
+
+    def mount(self, path: str, *args, **kwargs) -> None:
+        self._app._mount(path, self._type, *args, **kwargs)
diff --git a/jupyverse_api/jupyverse_api/app/__init__.py b/jupyverse_api/jupyverse_api/app/__init__.py
new file mode 100644
index 00000000..75e01c84
--- /dev/null
+++ b/jupyverse_api/jupyverse_api/app/__init__.py
@@ -0,0 +1,51 @@
+import logging
+from collections import defaultdict
+from typing import Dict, List
+
+from fastapi import FastAPI
+
+from ..exceptions import RedirectException, _redirect_exception_handler
+
+
+logger = logging.getLogger("app")
+
+
+class App:
+    """A wrapper around FastAPI that checks for endpoint path conflicts."""
+
+    _app: FastAPI
+    _router_paths: Dict[str, List[str]]
+
+    def __init__(self, app: FastAPI):
+        self._app = app
+        app.add_exception_handler(RedirectException, _redirect_exception_handler)
+        self._router_paths = defaultdict(list)
+
+    @property
+    def _paths(self):
+        return [path for router, paths in self._router_paths.items() for path in paths]
+
+    def _include_router(self, router, _type, **kwargs) -> None:
+        new_paths = []
+        for route in router.routes:
+            path = kwargs.get("prefix", "") + route.path
+            for _router, _paths in self._router_paths.items():
+                if path in _paths:
+                    raise RuntimeError(
+                        f"{_type} adds a handler for a path that is already defined in "
+                        f"{_router}: {path}"
+                    )
+            logger.debug("%s added handler for path: %s", _type, path)
+            new_paths.append(path)
+        self._router_paths[_type].extend(new_paths)
+        self._app.include_router(router, **kwargs)
+
+    def _mount(self, path: str, _type, *args, **kwargs) -> None:
+        for _router, _paths in self._router_paths.items():
+            if path in _paths:
+                raise RuntimeError(
+                    f"{_type } mounts a path that is already defined in {_router}: {path}"
+                )
+        self._router_paths[_type].append(path)
+        logger.debug("%s mounted path: %s", _type, path)
+        self._app.mount(path, *args, **kwargs)
diff --git a/jupyverse_api/jupyverse_api/auth/__init__.py b/jupyverse_api/jupyverse_api/auth/__init__.py
new file mode 100644
index 00000000..f8cd3d7b
--- /dev/null
+++ b/jupyverse_api/jupyverse_api/auth/__init__.py
@@ -0,0 +1,27 @@
+from abc import ABC, abstractmethod
+from typing import Any, Callable, Dict, List, Optional, Tuple
+
+from jupyverse_api import Config
+
+from .models import User  # noqa
+
+
+class Auth(ABC):
+    @abstractmethod
+    def current_user(self, permissions: Optional[Dict[str, List[str]]] = None) -> Callable:
+        ...
+
+    @abstractmethod
+    async def update_user(self) -> Callable:
+        ...
+ + @abstractmethod + def websocket_auth( + self, + permissions: Optional[Dict[str, List[str]]] = None, + ) -> Callable[[], Tuple[Any, Dict[str, List[str]]]]: + ... + + +class AuthConfig(Config): + pass diff --git a/plugins/noauth/fps_noauth/models.py b/jupyverse_api/jupyverse_api/auth/models.py similarity index 91% rename from plugins/noauth/fps_noauth/models.py rename to jupyverse_api/jupyverse_api/auth/models.py index 47f1b6e2..c6337013 100644 --- a/plugins/noauth/fps_noauth/models.py +++ b/jupyverse_api/jupyverse_api/auth/models.py @@ -1,10 +1,8 @@ from typing import Optional - from pydantic import BaseModel class User(BaseModel): - anonymous: bool = True username: str = "" name: str = "" display_name: str = "" diff --git a/jupyverse_api/jupyverse_api/contents/__init__.py b/jupyverse_api/jupyverse_api/contents/__init__.py new file mode 100644 index 00000000..d5256d4e --- /dev/null +++ b/jupyverse_api/jupyverse_api/contents/__init__.py @@ -0,0 +1,38 @@ +import asyncio +from abc import ABC, abstractmethod +from pathlib import Path +from typing import Dict, Union + +from jupyverse_api import Router + +from .models import Content, SaveContent + + +class FileIdManager(ABC): + stop_watching_files: asyncio.Event + stopped_watching_files: asyncio.Event + + @abstractmethod + async def get_path(self, file_id: str) -> str: + ... + + @abstractmethod + async def get_id(self, file_path: str) -> str: + ... + + +class Contents(Router, ABC): + @property + @abstractmethod + def file_id_manager(self) -> FileIdManager: + ... + + @abstractmethod + async def read_content( + self, path: Union[str, Path], get_content: bool, as_json: bool = False + ) -> Content: + ... + + @abstractmethod + async def write_content(self, content: Union[SaveContent, Dict]) -> None: + ... diff --git a/jupyverse_api/jupyverse_api/contents/models.py b/jupyverse_api/jupyverse_api/contents/models.py new file mode 100644 index 00000000..00b3ca79 --- /dev/null +++ b/jupyverse_api/jupyverse_api/contents/models.py @@ -0,0 +1,23 @@ +from typing import Dict, List, Optional, Union + +from pydantic import BaseModel + + +class Content(BaseModel): + name: str + path: str + last_modified: Optional[str] + created: Optional[str] + content: Optional[Union[str, Dict, List[Dict]]] + format: Optional[str] + mimetype: Optional[str] + size: Optional[int] + writable: bool + type: str + + +class SaveContent(BaseModel): + content: Optional[Union[str, Dict]] + format: str + path: str + type: str diff --git a/jupyverse_api/jupyverse_api/exceptions.py b/jupyverse_api/jupyverse_api/exceptions.py new file mode 100644 index 00000000..81bc916e --- /dev/null +++ b/jupyverse_api/jupyverse_api/exceptions.py @@ -0,0 +1,11 @@ +from fastapi import Request, Response +from fastapi.responses import RedirectResponse + + +class RedirectException(Exception): + def __init__(self, redirect_to: str): + self.redirect_to = redirect_to + + +async def _redirect_exception_handler(request: Request, exc: RedirectException) -> Response: + return RedirectResponse(url=exc.redirect_to) diff --git a/jupyverse_api/jupyverse_api/frontend/__init__.py b/jupyverse_api/jupyverse_api/frontend/__init__.py new file mode 100644 index 00000000..6a7acc62 --- /dev/null +++ b/jupyverse_api/jupyverse_api/frontend/__init__.py @@ -0,0 +1,6 @@ +from jupyverse_api import Config + + +class FrontendConfig(Config): + base_url: str = "/" + collaborative: bool = False diff --git a/jupyverse_api/jupyverse_api/jupyterlab/__init__.py b/jupyverse_api/jupyverse_api/jupyterlab/__init__.py new file mode 100644 index 
00000000..373628ad --- /dev/null +++ b/jupyverse_api/jupyverse_api/jupyterlab/__init__.py @@ -0,0 +1,9 @@ +from jupyverse_api import Config, Router + + +class JupyterLab(Router): + pass + + +class JupyterLabConfig(Config): + dev_mode: bool = False diff --git a/jupyverse_api/jupyverse_api/kernels/__init__.py b/jupyverse_api/jupyverse_api/kernels/__init__.py new file mode 100644 index 00000000..83dc050c --- /dev/null +++ b/jupyverse_api/jupyverse_api/kernels/__init__.py @@ -0,0 +1,16 @@ +from abc import ABC, abstractmethod +from pathlib import Path +from typing import Optional + +from jupyverse_api import Router, Config + + +class Kernels(Router, ABC): + @abstractmethod + async def watch_connection_files(self, path: Path) -> None: + ... + + +class KernelsConfig(Config): + default_kernel: str = "python3" + connection_path: Optional[str] = None diff --git a/plugins/kernels/fps_kernels/models.py b/jupyverse_api/jupyverse_api/kernels/models.py similarity index 100% rename from plugins/kernels/fps_kernels/models.py rename to jupyverse_api/jupyverse_api/kernels/models.py diff --git a/jupyverse_api/jupyverse_api/lab/__init__.py b/jupyverse_api/jupyverse_api/lab/__init__.py new file mode 100644 index 00000000..151dea2e --- /dev/null +++ b/jupyverse_api/jupyverse_api/lab/__init__.py @@ -0,0 +1,18 @@ +from abc import ABC, abstractmethod +from pathlib import Path +from typing import Any, Dict, List, Tuple + +from fastapi import APIRouter +from jupyverse_api import Router + + +class Lab(Router, ABC): + @abstractmethod + def init_router( + self, router: APIRouter, redirect_after_root: str + ) -> Tuple[Path, List[Dict[str, Any]]]: + ... + + @abstractmethod + def get_federated_extensions(self, extensions_dir: Path) -> Tuple[List, List]: + ... diff --git a/jupyverse_api/jupyverse_api/login/__init__.py b/jupyverse_api/jupyverse_api/login/__init__.py new file mode 100644 index 00000000..7f3b2b63 --- /dev/null +++ b/jupyverse_api/jupyverse_api/login/__init__.py @@ -0,0 +1,5 @@ +from jupyverse_api import Router + + +class Login(Router): + pass diff --git a/jupyverse_api/jupyverse_api/main/__init__.py b/jupyverse_api/jupyverse_api/main/__init__.py new file mode 100644 index 00000000..8a77dff7 --- /dev/null +++ b/jupyverse_api/jupyverse_api/main/__init__.py @@ -0,0 +1,24 @@ +from asphalt.core import Component, Context +from asphalt.web.fastapi import FastAPIComponent +from fastapi import FastAPI + +from ..app import App + + +class AppComponent(Component): + async def start( + self, + ctx: Context, + ) -> None: + app = await ctx.request_resource(FastAPI) + + _app = App(app) + ctx.add_resource(_app) + + +class JupyverseComponent(FastAPIComponent): + async def start( + self, + ctx: Context, + ) -> None: + await super().start(ctx) diff --git a/jupyverse_api/jupyverse_api/nbconvert/__init__.py b/jupyverse_api/jupyverse_api/nbconvert/__init__.py new file mode 100644 index 00000000..d4b5122c --- /dev/null +++ b/jupyverse_api/jupyverse_api/nbconvert/__init__.py @@ -0,0 +1,5 @@ +from jupyverse_api import Router + + +class Nbconvert(Router): + pass diff --git a/jupyverse_api/jupyverse_api/py.typed b/jupyverse_api/jupyverse_api/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/jupyverse_api/jupyverse_api/resource_usage/__init__.py b/jupyverse_api/jupyverse_api/resource_usage/__init__.py new file mode 100644 index 00000000..a2550e63 --- /dev/null +++ b/jupyverse_api/jupyverse_api/resource_usage/__init__.py @@ -0,0 +1,13 @@ +from jupyverse_api import Router, Config + + +class ResourceUsage(Router): + pass + 
+ +class ResourceUsageConfig(Config): + mem_limit: int = 0 + mem_warning_threshold: int = 0 + track_cpu_percent: bool = False + cpu_limit: int = 0 + cpu_warning_threshold: int = 0 diff --git a/jupyverse_api/jupyverse_api/retrolab/__init__.py b/jupyverse_api/jupyverse_api/retrolab/__init__.py new file mode 100644 index 00000000..1110d257 --- /dev/null +++ b/jupyverse_api/jupyverse_api/retrolab/__init__.py @@ -0,0 +1,5 @@ +from jupyverse_api import Router + + +class RetroLab(Router): + pass diff --git a/jupyverse_api/jupyverse_api/terminals/__init__.py b/jupyverse_api/jupyverse_api/terminals/__init__.py new file mode 100644 index 00000000..beb52338 --- /dev/null +++ b/jupyverse_api/jupyverse_api/terminals/__init__.py @@ -0,0 +1,16 @@ +from abc import ABC, abstractmethod +from jupyverse_api import Router + + +class Terminals(Router): + pass + + +class TerminalServer(ABC): + @abstractmethod + async def serve(self, websocket, permissions): + ... + + @abstractmethod + def quit(self, websocket): + ... diff --git a/jupyverse_api/jupyverse_api/yjs/__init__.py b/jupyverse_api/jupyverse_api/yjs/__init__.py new file mode 100644 index 00000000..00d53157 --- /dev/null +++ b/jupyverse_api/jupyverse_api/yjs/__init__.py @@ -0,0 +1,7 @@ +from typing import Type + +from jupyverse_api import Router + + +class Yjs(Router): + YDocWebSocketHandler: Type diff --git a/jupyverse_api/pyproject.toml b/jupyverse_api/pyproject.toml new file mode 100644 index 00000000..62a2606c --- /dev/null +++ b/jupyverse_api/pyproject.toml @@ -0,0 +1,42 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "jupyverse_api" +description = "The public API for Jupyverse" +readme = "README.md" +requires-python = ">=3.8" +keywords = [ + "jupyverse", "api", +] +authors = [ + { name = "Jupyter Development Team", email = "jupyter@googlegroups.com" }, +] +classifiers = [ + "Development Status :: 4 - Beta", + "Programming Language :: Python", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: Implementation :: CPython", + "Programming Language :: Python :: Implementation :: PyPy", +] +dependencies = [ + "pydantic >=1.10.6,<2", +] +dynamic = ["version"] + +[project.license] +text = "BSD 3-Clause License" + +[project.urls] +Source = "https://github.com/jupyter-server/jupyverse/api" + +[project.entry-points."asphalt.components"] +app = "jupyverse_api.main:AppComponent" +jupyverse = "jupyverse_api.main:JupyverseComponent" + +[tool.hatch.version] +path = "jupyverse_api/__init__.py" diff --git a/plugins/__init__.py b/plugins/__init__.py deleted file mode 100644 index 30cfb9c9..00000000 --- a/plugins/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -"""Top level fps_plugins namespace.""" -__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/plugins/auth/fps_auth/backends.py b/plugins/auth/fps_auth/backends.py index 23461a82..548357b5 100644 --- a/plugins/auth/fps_auth/backends.py +++ b/plugins/auth/fps_auth/backends.py @@ -1,9 +1,11 @@ +import logging import uuid +from dataclasses import dataclass from typing import Any, Dict, Generic, List, Optional, Tuple import httpx from fastapi import Depends, HTTPException, Response, WebSocket, status -from fastapi_users import ( # type: ignore +from fastapi_users import ( BaseUserManager, FastAPIUsers, UUIDIDMixin, @@ -17,252 +19,263 @@ from fastapi_users.authentication.strategy.base import 
Strategy from fastapi_users.authentication.transport.base import Transport from fastapi_users.db import SQLAlchemyUserDatabase -from fps.exceptions import RedirectException # type: ignore -from fps.logging import get_configured_logger # type: ignore -from fps_lab.config import get_lab_config # type: ignore -from httpx_oauth.clients.github import GitHubOAuth2 # type: ignore +from httpx_oauth.clients.github import GitHubOAuth2 +from jupyverse_api.exceptions import RedirectException +from jupyverse_api.frontend import FrontendConfig from starlette.requests import Request -from .config import get_auth_config -from .db import User, get_user_db, secret +from .config import _AuthConfig +from .db import User from .models import UserCreate, UserRead -logger = get_configured_logger("auth") +logger = logging.getLogger("auth") -class NoAuthTransport(Transport): - scheme = None # type: ignore - async def get_login_response(self, token: str, response: Response): - pass +@dataclass +class Res: + cookie_authentication: Any + current_user: Any + update_user: Any + fapi_users: Any + get_user_manager: Any + github_authentication: Any + github_cookie_authentication: Any + websocket_auth: Any - async def get_logout_response(self, response: Response): - pass - @staticmethod - def get_openapi_login_responses_success(): - pass +def get_backend(auth_config: _AuthConfig, frontend_config: FrontendConfig, db) -> Res: + class NoAuthTransport(Transport): + scheme = None # type: ignore - @staticmethod - def get_openapi_logout_responses_success(): - pass + async def get_login_response(self, token: str, response: Response): + pass + async def get_logout_response(self, response: Response): + pass -class NoAuthStrategy(Strategy, Generic[models.UP, models.ID]): - async def read_token( - self, token: Optional[str], user_manager: BaseUserManager[models.UP, models.ID] - ) -> Optional[models.UP]: - active_user = await user_manager.user_db.get_by_email(get_auth_config().global_email) - return active_user + @staticmethod + def get_openapi_login_responses_success(): + pass - async def write_token(self, user: models.UP): - pass + @staticmethod + def get_openapi_logout_responses_success(): + pass - async def destroy_token(self, token: str, user: models.UP): - pass + class NoAuthStrategy(Strategy, Generic[models.UP, models.ID]): + async def read_token( + self, token: Optional[str], user_manager: BaseUserManager[models.UP, models.ID] + ) -> Optional[models.UP]: + active_user = await user_manager.user_db.get_by_email(auth_config.global_email) + return active_user + async def write_token(self, user: models.UP): + pass -class GitHubTransport(CookieTransport): - async def get_login_response(self, token: str, response: Response): - await super().get_login_response(token, response) - response.status_code = status.HTTP_302_FOUND - response.headers["Location"] = "/lab" + async def destroy_token(self, token: str, user: models.UP): + pass + class GitHubTransport(CookieTransport): + async def get_login_response(self, token: str, response: Response): + await super().get_login_response(token, response) + response.status_code = status.HTTP_302_FOUND + response.headers["Location"] = "/lab" -noauth_transport = NoAuthTransport() -cookie_transport = CookieTransport(cookie_secure=get_auth_config().cookie_secure) -github_transport = GitHubTransport() + def get_noauth_strategy() -> NoAuthStrategy: + return NoAuthStrategy() + def get_jwt_strategy() -> JWTStrategy: + return JWTStrategy(secret=db.secret, lifetime_seconds=None) -def get_noauth_strategy() -> 
NoAuthStrategy: - return NoAuthStrategy() - - -def get_jwt_strategy() -> JWTStrategy: - return JWTStrategy(secret=secret, lifetime_seconds=None) # type: ignore - - -noauth_authentication = AuthenticationBackend( - name="noauth", - transport=noauth_transport, - get_strategy=get_noauth_strategy, -) -cookie_authentication = AuthenticationBackend( - name="cookie", - transport=cookie_transport, - get_strategy=get_jwt_strategy, -) -github_cookie_authentication = AuthenticationBackend( - name="github", - transport=github_transport, - get_strategy=get_jwt_strategy, -) -github_authentication = GitHubOAuth2( - get_auth_config().client_id, get_auth_config().client_secret.get_secret_value() -) - - -class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]): - async def on_after_register(self, user: User, request: Optional[Request] = None): - for oauth_account in user.oauth_accounts: - if oauth_account.oauth_name == "github": - async with httpx.AsyncClient() as client: - r = ( - await client.get(f"https://api.github.com/user/{oauth_account.account_id}") - ).json() - - await self.user_db.update( - user, - dict( - anonymous=False, - username=r["login"], - color=None, - avatar_url=r["avatar_url"], - is_active=True, - ), - ) - - -async def get_user_manager(user_db: SQLAlchemyUserDatabase = Depends(get_user_db)): - yield UserManager(user_db) - - -async def get_enabled_backends( - auth_config=Depends(get_auth_config), lab_config=Depends(get_lab_config) -): - if auth_config.mode == "noauth" and not lab_config.collaborative: - return [noauth_authentication, github_cookie_authentication] - else: - return [cookie_authentication, github_cookie_authentication] - - -fapi_users = FastAPIUsers[User, uuid.UUID]( - get_user_manager, - [noauth_authentication, cookie_authentication, github_cookie_authentication], -) + noauth_authentication = AuthenticationBackend( + name="noauth", + transport=NoAuthTransport(), + get_strategy=get_noauth_strategy, + ) + cookie_authentication = AuthenticationBackend( + name="cookie", + transport=CookieTransport(cookie_secure=auth_config.cookie_secure), + get_strategy=get_jwt_strategy, + ) -async def create_guest(user_manager, auth_config): - # workspace and settings are copied from global user - # but this is a new user - global_user = await user_manager.get_by_email(auth_config.global_email) - user_id = str(uuid.uuid4()) - guest = dict( - anonymous=True, - email=f"{user_id}@jupyter.com", - username=f"{user_id}@jupyter.com", - password="", - workspace=global_user.workspace, - settings=global_user.settings, - permissions={}, + github_cookie_authentication = AuthenticationBackend( + name="github", + transport=GitHubTransport(), + get_strategy=get_jwt_strategy, ) - return await user_manager.create(UserCreate(**guest)) - - -def current_user(permissions: Optional[Dict[str, List[str]]] = None): - async def _( - response: Response, - token: Optional[str] = None, - user: Optional[User] = Depends( - fapi_users.current_user(optional=True, get_enabled_backends=get_enabled_backends) - ), - user_manager: UserManager = Depends(get_user_manager), - auth_config=Depends(get_auth_config), - lab_config=Depends(get_lab_config), - ): - if auth_config.mode == "user": - # "user" authentication: check authorization - if user and permissions: - for resource, actions in permissions.items(): - user_actions_for_resource = user.permissions.get(resource, []) - if not all([a in user_actions_for_resource for a in actions]): - user = None - break + + github_authentication = GitHubOAuth2(auth_config.client_id, 
auth_config.client_secret) + + class UserManager(UUIDIDMixin, BaseUserManager[User, uuid.UUID]): + async def on_after_register(self, user: User, request: Optional[Request] = None): + for oauth_account in user.oauth_accounts: + if oauth_account.oauth_name == "github": + async with httpx.AsyncClient() as client: + r = ( + await client.get( + f"https://api.github.com/user/{oauth_account.account_id}" + ) + ).json() + + await self.user_db.update( + user, + dict( + anonymous=False, + username=r["login"], + color=None, + avatar_url=r["avatar_url"], + is_active=True, + ), + ) + + async def get_user_manager(user_db: SQLAlchemyUserDatabase = Depends(db.get_user_db)): + yield UserManager(user_db) + + def get_enabled_backends(): + if auth_config.mode == "noauth" and not frontend_config.collaborative: + res = [noauth_authentication, github_cookie_authentication] else: - # "noauth" or "token" authentication - if lab_config.collaborative: - if not user and auth_config.mode == "noauth": - user = await create_guest(user_manager, auth_config) - await cookie_authentication.login(get_jwt_strategy(), user, response) - - elif not user and auth_config.mode == "token": - global_user = await user_manager.get_by_email(auth_config.global_email) - if global_user and global_user.username == token: - user = await create_guest(user_manager, auth_config) - await cookie_authentication.login(get_jwt_strategy(), user, response) + res = [cookie_authentication, github_cookie_authentication] + return res + + fapi_users = FastAPIUsers[User, uuid.UUID]( + get_user_manager, + [ + noauth_authentication, + cookie_authentication, + github_cookie_authentication, + ], + ) + + async def create_guest(user_manager): + # workspace and settings are copied from global user + # but this is a new user + global_user = await user_manager.get_by_email(auth_config.global_email) + user_id = str(uuid.uuid4()) + guest = dict( + anonymous=True, + email=f"{user_id}@jupyter.com", + username=f"{user_id}@jupyter.com", + password="", + workspace=global_user.workspace, + settings=global_user.settings, + permissions={}, + ) + return await user_manager.create(UserCreate(**guest)) + + def current_user(permissions: Optional[Dict[str, List[str]]] = None): + async def _( + response: Response, + token: Optional[str] = None, + user: Optional[User] = Depends( + fapi_users.current_user(optional=True, get_enabled_backends=get_enabled_backends) + ), + user_manager: BaseUserManager[User, models.ID] = Depends(get_user_manager), + ): + if auth_config.mode == "user": + # "user" authentication: check authorization + if user and permissions: + for resource, actions in permissions.items(): + user_actions_for_resource = user.permissions.get(resource, []) + if not all([a in user_actions_for_resource for a in actions]): + user = None + break else: - if auth_config.mode == "token": - global_user = await user_manager.get_by_email(auth_config.global_email) - if global_user and global_user.username == token: - user = global_user + # "noauth" or "token" authentication + if frontend_config.collaborative: + if not user and auth_config.mode == "noauth": + user = await create_guest(user_manager) await cookie_authentication.login(get_jwt_strategy(), user, response) - if user: - return user - - elif auth_config.login_url: - raise RedirectException(auth_config.login_url) + elif not user and auth_config.mode == "token": + global_user = await user_manager.get_by_email(auth_config.global_email) + if global_user and global_user.username == token: + user = await create_guest(user_manager) + 
await cookie_authentication.login(get_jwt_strategy(), user, response) + else: + if auth_config.mode == "token": + global_user = await user_manager.get_by_email(auth_config.global_email) + if global_user and global_user.username == token: + user = global_user + await cookie_authentication.login(get_jwt_strategy(), user, response) - else: - raise HTTPException(status_code=status.HTTP_403_FORBIDDEN) - - return _ - - -def websocket_auth(permissions: Optional[Dict[str, List[str]]] = None): - """ - A function returning a dependency for the WebSocket connection. - - :param permissions: the permissions the user should be granted access to. The user should have - access to at least one of them for the WebSocket to be opened. - :returns: a dependency for the WebSocket connection. The dependency returns a tuple consisting - of the websocket and the checked user permissions if the websocket is accepted, None otherwise. - """ - - async def _( - websocket: WebSocket, - auth_config=Depends(get_auth_config), - user_manager: UserManager = Depends(get_user_manager), - ) -> Optional[Tuple[WebSocket, Optional[Dict[str, List[str]]]]]: - accept_websocket = False - checked_permissions: Optional[Dict[str, List[str]]] = None - if auth_config.mode == "noauth": - accept_websocket = True - elif "fastapiusersauth" in websocket._cookies: - token = websocket._cookies["fastapiusersauth"] - user = await get_jwt_strategy().read_token(token, user_manager) if user: - if auth_config.mode == "user": - # "user" authentication: check authorization - if permissions is None: - accept_websocket = True - else: - checked_permissions = {} - for resource, actions in permissions.items(): - user_actions_for_resource = user.permissions.get(resource) - if user_actions_for_resource is None: - continue - allowed = checked_permissions[resource] = [] - for action in actions: - if action in user_actions_for_resource: - allowed.append(action) - accept_websocket = True - else: - accept_websocket = True - if accept_websocket: - return websocket, checked_permissions - else: - await websocket.close(code=status.WS_1008_POLICY_VIOLATION) - return None + return user + + elif auth_config.login_url: + raise RedirectException(auth_config.login_url) - return _ + else: + raise HTTPException(status_code=status.HTTP_403_FORBIDDEN) + + return _ + + def websocket_auth(permissions: Optional[Dict[str, List[str]]] = None): + """ + A function returning a dependency for the WebSocket connection. + + :param permissions: the permissions the user should be granted access to. The user should + have access to at least one of them for the WebSocket to be opened. + :returns: a dependency for the WebSocket connection. The dependency returns a tuple + consisting of the websocket and the checked user permissions if the websocket is accepted, + None otherwise. 
+ """ + + async def _( + websocket: WebSocket, + user_manager: BaseUserManager[models.UP, models.ID] = Depends(get_user_manager), + ) -> Optional[Tuple[WebSocket, Optional[Dict[str, List[str]]]]]: + accept_websocket = False + checked_permissions: Optional[Dict[str, List[str]]] = None + if auth_config.mode == "noauth": + accept_websocket = True + elif "fastapiusersauth" in websocket._cookies: + token = websocket._cookies["fastapiusersauth"] + user = await get_jwt_strategy().read_token(token, user_manager) + if user: + if auth_config.mode == "user": + # "user" authentication: check authorization + if permissions is None: + accept_websocket = True + else: + checked_permissions = {} + for resource, actions in permissions.items(): + user_actions_for_resource = user.permissions.get(resource) + if user_actions_for_resource is None: + continue + allowed = checked_permissions[resource] = [] + for action in actions: + if action in user_actions_for_resource: + allowed.append(action) + accept_websocket = True + else: + accept_websocket = True + if accept_websocket: + return websocket, checked_permissions + else: + await websocket.close(code=status.WS_1008_POLICY_VIOLATION) + return None + return _ -async def update_user( - user: UserRead = Depends(current_user()), user_db: SQLAlchemyUserDatabase = Depends(get_user_db) -): - async def _(data: Dict[str, Any]) -> UserRead: - await user_db.update(user, data) - return user + async def update_user( + user: UserRead = Depends(current_user()), + user_db: SQLAlchemyUserDatabase = Depends(db.get_user_db), + ): + async def _(data: Dict[str, Any]) -> UserRead: + await user_db.update(user, data) + return user - return _ + return _ + + return Res( + cookie_authentication=cookie_authentication, + current_user=current_user, + update_user=update_user, + fapi_users=fapi_users, + get_user_manager=get_user_manager, + github_authentication=github_authentication, + github_cookie_authentication=github_cookie_authentication, + websocket_auth=websocket_auth, + ) diff --git a/plugins/auth/fps_auth/config.py b/plugins/auth/fps_auth/config.py index 715e7efe..69d08caa 100644 --- a/plugins/auth/fps_auth/config.py +++ b/plugins/auth/fps_auth/config.py @@ -1,30 +1,18 @@ from typing import Optional from uuid import uuid4 -from fps.config import PluginModel, get_config # type: ignore -from fps.hooks import register_config # type: ignore -from pydantic import BaseSettings, SecretStr +from jupyverse_api.auth import AuthConfig -class AuthConfig(PluginModel, BaseSettings): +class _AuthConfig(AuthConfig): client_id: str = "" - client_secret: SecretStr = SecretStr("") + client_secret: str = "" redirect_uri: str = "" # mode: Literal["noauth", "token", "user"] = "token" mode: str = "token" - token: str = str(uuid4()) + token: str = uuid4().hex global_email: str = "guest@jupyter.com" cookie_secure: bool = False # FIXME: should default to True, and set to False for tests clear_users: bool = False test: bool = False login_url: Optional[str] = None - - class Config(PluginModel.Config): - env_prefix = "fps_auth_" - - -def get_auth_config(): - return get_config(AuthConfig) - - -c = register_config(AuthConfig) diff --git a/plugins/auth/fps_auth/db.py b/plugins/auth/fps_auth/db.py index bd7b5865..1e40c181 100644 --- a/plugins/auth/fps_auth/db.py +++ b/plugins/auth/fps_auth/db.py @@ -1,44 +1,25 @@ +import logging import secrets +from dataclasses import dataclass from pathlib import Path -from typing import AsyncGenerator, List +from typing import Any, AsyncGenerator, List from fastapi import Depends 
-from fastapi_users.db import SQLAlchemyBaseOAuthAccountTableUUID # type: ignore -from fastapi_users.db import ( # type: ignore +from fastapi_users.db import SQLAlchemyBaseOAuthAccountTableUUID +from fastapi_users.db import ( SQLAlchemyBaseUserTableUUID, SQLAlchemyUserDatabase, ) -from fps.config import get_config # type: ignore from sqlalchemy import JSON, Boolean, Column, String, Text # type: ignore from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine # type: ignore from sqlalchemy.ext.declarative import DeclarativeMeta, declarative_base # type: ignore from sqlalchemy.orm import relationship, sessionmaker # type: ignore -from .config import AuthConfig +from .config import _AuthConfig -auth_config = get_config(AuthConfig) -jupyter_dir = Path.home() / ".local" / "share" / "jupyter" -jupyter_dir.mkdir(parents=True, exist_ok=True) -name = "jupyverse" -if auth_config.test: - name += "_test" -secret_path = jupyter_dir / f"{name}_secret" -userdb_path = jupyter_dir / f"{name}_users.db" +logger = logging.getLogger("auth") -if auth_config.clear_users: - if userdb_path.is_file(): - userdb_path.unlink() - if secret_path.is_file(): - secret_path.unlink() - -if not secret_path.is_file(): - secret_path.write_text(secrets.token_hex(32)) - -secret = secret_path.read_text() - - -DATABASE_URL = f"sqlite+aiosqlite:///{userdb_path}" Base: DeclarativeMeta = declarative_base() @@ -61,19 +42,57 @@ class User(SQLAlchemyBaseUserTableUUID, Base): oauth_accounts: List[OAuthAccount] = relationship("OAuthAccount", lazy="joined") -engine = create_async_engine(DATABASE_URL) -async_session_maker = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False) - - -async def create_db_and_tables(): - async with engine.begin() as conn: - await conn.run_sync(Base.metadata.create_all) - - -async def get_async_session() -> AsyncGenerator[AsyncSession, None]: - async with async_session_maker() as session: - yield session - - -async def get_user_db(session: AsyncSession = Depends(get_async_session)): - yield SQLAlchemyUserDatabase(session, User, OAuthAccount) +@dataclass +class Res: + User: Any + async_session_maker: Any + create_db_and_tables: Any + get_async_session: Any + get_user_db: Any + secret: Any + + +def get_db(auth_config: _AuthConfig) -> Res: + jupyter_dir = Path.home() / ".local" / "share" / "jupyter" + jupyter_dir.mkdir(parents=True, exist_ok=True) + name = "jupyverse" + if auth_config.test: + name += "_test" + secret_path = jupyter_dir / f"{name}_secret" + userdb_path = jupyter_dir / f"{name}_users.db" + + if auth_config.clear_users: + if userdb_path.is_file(): + userdb_path.unlink() + if secret_path.is_file(): + secret_path.unlink() + + if not secret_path.is_file(): + secret_path.write_text(secrets.token_hex(32)) + + secret = secret_path.read_text() + + database_url = f"sqlite+aiosqlite:///{userdb_path}" + + engine = create_async_engine(database_url) + async_session_maker = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False) + + async def create_db_and_tables(): + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + async def get_async_session() -> AsyncGenerator[AsyncSession, None]: + async with async_session_maker() as session: + yield session + + async def get_user_db(session: AsyncSession = Depends(get_async_session)): + yield SQLAlchemyUserDatabase(session, User, OAuthAccount) + + return Res( + User=User, + async_session_maker=async_session_maker, + create_db_and_tables=create_db_and_tables, + get_async_session=get_async_session, + 
get_user_db=get_user_db, + secret=secret, + ) diff --git a/plugins/auth/fps_auth/fixtures.py b/plugins/auth/fps_auth/fixtures.py deleted file mode 100644 index 94956c6d..00000000 --- a/plugins/auth/fps_auth/fixtures.py +++ /dev/null @@ -1,84 +0,0 @@ -from uuid import uuid4 - -import pytest # type: ignore - -from .config import AuthConfig, get_auth_config - - -@pytest.fixture -def auth_mode(): - return "token" - - -@pytest.fixture -def auth_config(auth_mode): - yield AuthConfig.parse_obj({"mode": auth_mode, "test": True}) - - -@pytest.fixture -def config_override(app, auth_config): - async def override_get_config(): - return auth_config - - app.dependency_overrides[get_auth_config] = override_get_config - - -@pytest.fixture() -def permissions(): - return {} - - -@pytest.fixture() -def authenticated_client(client, permissions): - # create a new user - username = uuid4().hex - # if logged in, log out - first_time = True - while True: - response = client.get("/api/me") - if response.status_code == 403: - break - assert first_time - response = client.post("/auth/logout") - assert response.status_code == 200 - first_time = False - - # register user - register_body = { - "email": username + "@example.com", - "password": username, - "username": username, - "permissions": permissions, - } - response = client.post("/auth/register", json=register_body) - # check that we cannot register if not logged in - assert response.status_code == 403 - # log in as admin - login_body = {"username": "admin@jupyter.com", "password": "jupyverse"} - response = client.post("/auth/login", data=login_body) - assert response.status_code == 200 - # register user - response = client.post("/auth/register", json=register_body) - assert response.status_code == 201 - - # log out - response = client.post("/auth/logout") - assert response.status_code == 200 - # check that we can't get our identity, since we're not logged in - response = client.get("/api/me") - assert response.status_code == 403 - - # log in with registered user - login_body = {"username": username + "@example.com", "password": username} - response = client.post("/auth/login", data=login_body) - assert response.status_code == 200 - # we should now have a cookie - assert "fastapiusersauth" in client.cookies - # check our identity, since we're logged in - response = client.get("/api/me", params={"permissions": permissions}) - assert response.status_code == 200 - me = response.json() - assert me["identity"]["username"] == username - # check our permissions - assert me["permissions"] == permissions - yield client diff --git a/plugins/auth/fps_auth/main.py b/plugins/auth/fps_auth/main.py new file mode 100644 index 00000000..6c42a956 --- /dev/null +++ b/plugins/auth/fps_auth/main.py @@ -0,0 +1,64 @@ +import logging + +from asphalt.core import Component, Context +from fastapi_users.exceptions import UserAlreadyExists +from jupyverse_api.auth import Auth, AuthConfig +from jupyverse_api.frontend import FrontendConfig +from jupyverse_api.app import App + +from .config import _AuthConfig +from .routes import auth_factory + + +logger = logging.getLogger("auth") + + +class AuthComponent(Component): + def __init__(self, **kwargs): + self.auth_config = _AuthConfig(**kwargs) + + async def start( + self, + ctx: Context, + ) -> None: + ctx.add_resource(self.auth_config, types=AuthConfig) + + app = await ctx.request_resource(App) + frontend_config = await ctx.request_resource(FrontendConfig) + + auth = auth_factory(app, self.auth_config, frontend_config) + ctx.add_resource(auth, 
types=Auth) + + await auth.db.create_db_and_tables() + + if self.auth_config.test: + try: + await auth.create_user( + username="admin@jupyter.com", + email="admin@jupyter.com", + password="jupyverse", + permissions={"admin": ["read", "write"]}, + ) + except UserAlreadyExists: + pass + + try: + await auth.create_user( + username=self.auth_config.token, + email=self.auth_config.global_email, + password="", + permissions={}, + ) + except UserAlreadyExists: + global_user = await auth.get_user_by_email(self.auth_config.global_email) + await auth._update_user( + global_user, + username=self.auth_config.token, + permissions={}, + ) + + if self.auth_config.mode == "token": + logger.info("") + logger.info("To access the server, copy and paste this URL:") + logger.info(f"http://127.0.0.1:8000/?token={self.auth_config.token}") + logger.info("") diff --git a/plugins/auth/fps_auth/models.py b/plugins/auth/fps_auth/models.py index 4354d008..92295dcf 100644 --- a/plugins/auth/fps_auth/models.py +++ b/plugins/auth/fps_auth/models.py @@ -1,24 +1,13 @@ import uuid -from typing import Dict, List, Optional +from typing import Dict, List from fastapi_users import schemas -from pydantic import BaseModel +from jupyverse_api.auth import User -class Permissions(BaseModel): - permissions: Dict[str, List[str]] - - -class JupyterUser(Permissions): +class JupyterUser(User): anonymous: bool = True - username: str = "" - name: str = "" - display_name: str = "" - initials: Optional[str] = None - color: Optional[str] = None - avatar_url: Optional[str] = None - workspace: str = "{}" - settings: str = "{}" + permissions: Dict[str, List[str]] class UserRead(schemas.BaseUser[uuid.UUID], JupyterUser): diff --git a/plugins/auth/fps_auth/py.typed b/plugins/auth/fps_auth/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/plugins/auth/fps_auth/routes.py b/plugins/auth/fps_auth/routes.py index a71d2eb7..fb1bd8ca 100644 --- a/plugins/auth/fps_auth/routes.py +++ b/plugins/auth/fps_auth/routes.py @@ -1,171 +1,150 @@ import contextlib import json -from typing import Dict, List +import logging +from typing import Any, Callable, Dict, List, Optional, Tuple from fastapi import APIRouter, Depends, Request -from fastapi_users.exceptions import UserAlreadyExists -from fps.config import get_config # type: ignore -from fps.hooks import register_router # type: ignore -from fps.logging import get_configured_logger # type: ignore -from fps_uvicorn.cli import add_query_params # type: ignore -from fps_uvicorn.config import UvicornConfig # type: ignore +from jupyverse_api import Router +from jupyverse_api.app import App +from jupyverse_api.auth import Auth +from jupyverse_api.frontend import FrontendConfig from sqlalchemy import select # type: ignore -from .backends import ( - cookie_authentication, - current_user, - fapi_users, - get_user_manager, - github_authentication, - github_cookie_authentication, -) -from .config import get_auth_config -from .db import ( - User, - async_session_maker, - create_db_and_tables, - get_async_session, - get_user_db, - secret, -) -from .models import UserCreate, UserRead, UserUpdate - -logger = get_configured_logger("auth") - -auth_config = get_auth_config() -if auth_config.mode == "token": - add_query_params({"token": auth_config.token}) - -router = APIRouter() - - -get_async_session_context = contextlib.asynccontextmanager(get_async_session) -get_user_db_context = contextlib.asynccontextmanager(get_user_db) -get_user_manager_context = contextlib.asynccontextmanager(get_user_manager) +from 
.backends import get_backend +from .config import _AuthConfig +from .db import get_db +from .models import UserCreate, UserRead, UserUpdate -@contextlib.asynccontextmanager -async def _get_user_manager(): - async with get_async_session_context() as session: - async with get_user_db_context(session) as user_db: - async with get_user_manager_context(user_db) as user_manager: - yield user_manager +logger = logging.getLogger("auth") -async def create_user(**kwargs): - async with _get_user_manager() as user_manager: - await user_manager.create(UserCreate(**kwargs)) +def auth_factory( + app: App, + auth_config: _AuthConfig, + frontend_config: FrontendConfig, +): + db = get_db(auth_config) + backend = get_backend(auth_config, frontend_config, db) + + get_async_session_context = contextlib.asynccontextmanager(db.get_async_session) + get_user_db_context = contextlib.asynccontextmanager(db.get_user_db) + get_user_manager_context = contextlib.asynccontextmanager(backend.get_user_manager) + + @contextlib.asynccontextmanager + async def _get_user_manager(): + async with get_async_session_context() as session: + async with get_user_db_context(session) as user_db: + async with get_user_manager_context(user_db) as user_manager: + yield user_manager + + async def create_user(**kwargs): + async with _get_user_manager() as user_manager: + await user_manager.create(UserCreate(**kwargs)) + + async def update_user(user, **kwargs): + async with _get_user_manager() as user_manager: + await user_manager.update(UserUpdate(**kwargs), user) + + async def get_user_by_email(user_email): + async with _get_user_manager() as user_manager: + return await user_manager.get_by_email(user_email) + + class _Auth(Auth, Router): + def __init__(self) -> None: + super().__init__(app) + + self.db = db + + router = APIRouter() + + @router.get("/auth/users") + async def get_users( + user: UserRead = Depends(backend.current_user(permissions={"admin": ["read"]})), + ): + async with db.async_session_maker() as session: + statement = select(db.User) + users = (await session.execute(statement)).unique().all() + return [usr.User for usr in users if usr.User.is_active] + + @router.get("/api/me") + async def get_api_me( + request: Request, + user: UserRead = Depends(backend.current_user()), + ): + checked_permissions: Dict[str, List[str]] = {} + permissions = json.loads( + dict(request.query_params).get("permissions", "{}").replace("'", '"') + ) + if permissions: + user_permissions = user.permissions + for resource, actions in permissions.items(): + user_resource_permissions = user_permissions.get(resource) + if user_resource_permissions is None: + continue + allowed = checked_permissions[resource] = [] + for action in actions: + if action in user_resource_permissions: + allowed.append(action) + + keys = ["username", "name", "display_name", "initials", "avatar_url", "color"] + identity = {k: getattr(user, k) for k in keys} + return { + "identity": identity, + "permissions": checked_permissions, + } + + # redefine GET /me because we want our current_user dependency + # it is first defined in users_router and so it wins over the one in + # fapi_users.get_users_router + users_router = APIRouter() + + @users_router.get("/me") + async def get_me( + user: UserRead = Depends(backend.current_user(permissions={"admin": ["read"]})), + ): + return user + + users_router.include_router(backend.fapi_users.get_users_router(UserRead, UserUpdate)) + + # Cookie based auth login and logout + self.include_router( + 
backend.fapi_users.get_auth_router(backend.cookie_authentication), prefix="/auth" + ) + self.include_router( + backend.fapi_users.get_register_router(UserRead, UserCreate), + prefix="/auth", + dependencies=[Depends(backend.current_user(permissions={"admin": ["write"]}))], + ) + self.include_router(users_router, prefix="/auth/user") + + # GitHub OAuth register router + self.include_router( + backend.fapi_users.get_oauth_router( + backend.github_authentication, backend.github_cookie_authentication, db.secret + ), + prefix="/auth/github", + ) + self.include_router(router) -async def update_user(user, **kwargs): - async with _get_user_manager() as user_manager: - await user_manager.update(UserUpdate(**kwargs), user) + self.create_user = create_user + self.__update_user = update_user + self.get_user_by_email = get_user_by_email + async def _update_user(self, user, **kwargs): + return await self.__update_user(user, **kwargs) -async def get_user_by_email(user_email): - async with _get_user_manager() as user_manager: - return await user_manager.get_by_email(user_email) + def current_user(self, permissions: Optional[Dict[str, List[str]]] = None) -> Callable: + return backend.current_user(permissions) + async def update_user(self, update_user=Depends(backend.update_user)) -> Callable: + return update_user -@router.on_event("startup") -async def startup(): - await create_db_and_tables() + def websocket_auth( + self, + permissions: Optional[Dict[str, List[str]]] = None, + ) -> Callable[[], Tuple[Any, Dict[str, List[str]]]]: + return backend.websocket_auth(permissions) - if auth_config.test: - try: - await create_user( - username="admin@jupyter.com", - email="admin@jupyter.com", - password="jupyverse", - permissions={"admin": ["read", "write"]}, - ) - except UserAlreadyExists: - pass - - try: - await create_user( - username=auth_config.token, - email=auth_config.global_email, - password="", - permissions={}, - ) - except UserAlreadyExists: - global_user = await get_user_by_email(auth_config.global_email) - await update_user( - global_user, - username=auth_config.token, - permissions={}, - ) - - if auth_config.mode == "token": - uvicorn_config = get_config(UvicornConfig) - logger.info("") - logger.info("To access the server, copy and paste this URL:") - logger.info( - f"http://{uvicorn_config.host}:{uvicorn_config.port}/?token={auth_config.token}" - ) - logger.info("") - - -@router.get("/auth/users") -async def get_users(user: UserRead = Depends(current_user(permissions={"admin": ["read"]}))): - async with async_session_maker() as session: - statement = select(User) - users = (await session.execute(statement)).unique().all() - return [usr.User for usr in users if usr.User.is_active] - - -@router.get("/api/me") -async def get_api_me( - request: Request, - user: UserRead = Depends(current_user()), -): - checked_permissions: Dict[str, List[str]] = {} - permissions = json.loads(dict(request.query_params).get("permissions", "{}").replace("'", '"')) - if permissions: - user_permissions = user.permissions - for resource, actions in permissions.items(): - user_resource_permissions = user_permissions.get(resource) - if user_resource_permissions is None: - continue - allowed = checked_permissions[resource] = [] - for action in actions: - if action in user_resource_permissions: - allowed.append(action) - - keys = ["username", "name", "display_name", "initials", "avatar_url", "color"] - identity = {k: getattr(user, k) for k in keys} - return { - "identity": identity, - "permissions": checked_permissions, - } - - -# 
redefine GET /me because we want our current_user dependency -# it is first defined in users_router and so it wins over the one in fapi_users.get_users_router -users_router = APIRouter() - - -@users_router.get("/me") -async def get_me(user: UserRead = Depends(current_user(permissions={"admin": ["read"]}))): - return user - - -users_router.include_router(fapi_users.get_users_router(UserRead, UserUpdate)) - -# Cookie based auth login and logout -r_cookie_auth = register_router(fapi_users.get_auth_router(cookie_authentication), prefix="/auth") -r_register = register_router( - fapi_users.get_register_router(UserRead, UserCreate), - prefix="/auth", - dependencies=[Depends(current_user(permissions={"admin": ["write"]}))], -) -r_user = register_router(users_router, prefix="/auth/user") - -# GitHub OAuth register router -r_github = register_router( - fapi_users.get_oauth_router(github_authentication, github_cookie_authentication, secret), - prefix="/auth/github", -) - -r = register_router(router) + return _Auth() diff --git a/plugins/auth/pyproject.toml b/plugins/auth/pyproject.toml index be75444e..60e8b9c5 100644 --- a/plugins/auth/pyproject.toml +++ b/plugins/auth/pyproject.toml @@ -5,13 +5,10 @@ build-backend = "hatchling.build" [project] name = "fps_auth" description = "An FPS plugin for the authentication API" -keywords = ["jupyter", "server", "fastapi", "pluggy", "plugins"] +keywords = ["jupyter", "server", "fastapi", "plugins"] dynamic = ["version"] requires-python = ">=3.8" dependencies = [ - "fps[uvicorn] >=0.0.17", - "fps-lab", - "fps-login", "aiosqlite", "fastapi-users[sqlalchemy,oauth] >=10.1.4,<11", "sqlalchemy >=1,<2", @@ -37,17 +34,8 @@ ignore = [ ".*",] [tool.jupyter-releaser] skip = [ "check-links",] -[project.entry-points.fps_router] -fps-auth = "fps_auth.routes" - -[project.entry-points.fps_config] -fps-auth = "fps_auth.config" - -[project.entry-points.jupyverse_auth] -User = "fps_auth.models:UserRead" -current_user = "fps_auth.backends:current_user" -update_user = "fps_auth.backends:update_user" -websocket_auth = "fps_auth.backends:websocket_auth" +[project.entry-points."asphalt.components"] +auth = "fps_auth.main:AuthComponent" [tool.hatch.version] path = "fps_auth/__init__.py" diff --git a/plugins/auth_base/MANIFEST.in b/plugins/auth_base/MANIFEST.in deleted file mode 100644 index efa752ea..00000000 --- a/plugins/auth_base/MANIFEST.in +++ /dev/null @@ -1 +0,0 @@ -include *.md diff --git a/plugins/auth_base/README.md b/plugins/auth_base/README.md deleted file mode 100644 index 9abafd16..00000000 --- a/plugins/auth_base/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# fps-auth-base - -An FPS plugin for the authentication API. 
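With the `fps_auth_base` shim removed, plugins no longer look up `current_user` through `jupyverse_auth` entry points: an auth backend such as `AuthComponent` or `AuthFiefComponent` publishes an `Auth` resource in the asphalt context, and consumers request it alongside the `App` wrapper. Below is a rough sketch of a consuming plugin, assuming the resource-request pattern used elsewhere in this changeset; `MyRoutes`, `MyRouterComponent`, and the `/api/hello` endpoint are illustrative only.

```python
from asphalt.core import Component, Context
from fastapi import APIRouter, Depends

from jupyverse_api import Router
from jupyverse_api.app import App
from jupyverse_api.auth import Auth, User


class MyRoutes(Router):
    def __init__(self, app: App, auth: Auth) -> None:
        super().__init__(app)

        router = APIRouter()

        @router.get("/api/hello")
        async def hello(user: User = Depends(auth.current_user())):
            # auth.current_user() returns a FastAPI dependency, per the Auth ABC
            return {"hello": user.username}

        # registering through the App wrapper lets it detect path conflicts
        self.include_router(router)


class MyRouterComponent(Component):
    async def start(self, ctx: Context) -> None:
        app = await ctx.request_resource(App)
        auth = await ctx.request_resource(Auth)
        MyRoutes(app, auth)
```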
diff --git a/plugins/auth_base/fps_auth_base/__init__.py b/plugins/auth_base/fps_auth_base/__init__.py deleted file mode 100644 index 40495029..00000000 --- a/plugins/auth_base/fps_auth_base/__init__.py +++ /dev/null @@ -1,15 +0,0 @@ -import pkg_resources - -__version__ = "0.0.50" - -auth = {ep.name: ep.load() for ep in pkg_resources.iter_entry_points(group="jupyverse_auth")} - -try: - User = auth["User"] - current_user = auth["current_user"] - update_user = auth["update_user"] - websocket_auth = auth["websocket_auth"] -except KeyError: - raise RuntimeError( - "An auth plugin must be installed, for instance: pip install fps-auth", - ) diff --git a/plugins/auth_base/pyproject.toml b/plugins/auth_base/pyproject.toml deleted file mode 100644 index 3f063ca4..00000000 --- a/plugins/auth_base/pyproject.toml +++ /dev/null @@ -1,34 +0,0 @@ -[build-system] -requires = [ "hatchling",] -build-backend = "hatchling.build" - -[project] -name = "fps_auth_base" -description = "An FPS plugin for the authentication API" -keywords = [ "jupyter", "server", "fastapi", "pluggy", "plugins",] -requires-python = ">=3.8" -dependencies = ["fps >=0.0.17"] -dynamic = [ "version",] - -[[project.authors]] -name = "Jupyter Development Team" -email = "jupyter@googlegroups.com" - -[project.readme] -file = "README.md" -content-type = "text/markdown" - -[project.license] -text = "BSD 3-Clause License" - -[project.urls] -Homepage = "https://jupyter.org" - -[tool.check-manifest] -ignore = [ ".*",] - -[tool.jupyter-releaser] -skip = [ "check-links",] - -[tool.hatch.version] -path = "fps_auth_base/__init__.py" diff --git a/plugins/auth_fief/fps_auth_fief/backend.py b/plugins/auth_fief/fps_auth_fief/backend.py index a4d398aa..05c09871 100644 --- a/plugins/auth_fief/fps_auth_fief/backend.py +++ b/plugins/auth_fief/fps_auth_fief/backend.py @@ -4,89 +4,93 @@ from fastapi.security import APIKeyCookie from fief_client import FiefAccessTokenInfo, FiefAsync, FiefUserInfo from fief_client.integrations.fastapi import FiefAuth +from jupyverse_api.auth import User -from .config import get_auth_fief_config -from .models import UserRead +from .config import _AuthFiefConfig -class CustomFiefAuth(FiefAuth): - client: FiefAsync +class Backend: + def __init__(self, auth_fief_config: _AuthFiefConfig): + class CustomFiefAuth(FiefAuth): + client: FiefAsync - async def get_unauthorized_response(self, request: Request, response: Response): - redirect_uri = request.url_for("auth_callback") - auth_url = await self.client.auth_url(redirect_uri, scope=["openid"]) - raise HTTPException( - status_code=status.HTTP_307_TEMPORARY_REDIRECT, - headers={"Location": auth_url}, - ) - - -auth_fief_config = get_auth_fief_config() - -fief = FiefAsync( - auth_fief_config.base_url, - auth_fief_config.client_id, - auth_fief_config.client_secret.get_secret_value(), -) - -SESSION_COOKIE_NAME = "fps_auth_fief_user_session" -scheme = APIKeyCookie(name=SESSION_COOKIE_NAME, auto_error=False) -auth = CustomFiefAuth(fief, scheme) - - -async def update_user( - user: FiefUserInfo = Depends(auth.current_user()), - access_token_info: FiefAccessTokenInfo = Depends(auth.authenticated()), -): - async def _(data: Dict[str, Any]) -> FiefUserInfo: - user = await fief.update_profile(access_token_info["access_token"], {"fields": data}) - return user + async def get_unauthorized_response(self, request: Request, response: Response): + redirect_uri = str(request.url_for("auth_callback")) + auth_url = await self.client.auth_url(redirect_uri, scope=["openid"]) + raise HTTPException( + 
status_code=status.HTTP_307_TEMPORARY_REDIRECT, + headers={"Location": auth_url}, + ) - return _ - - -def websocket_auth(permissions: Optional[Dict[str, List[str]]] = None): - async def _( - websocket: WebSocket, - ) -> Optional[Tuple[WebSocket, Optional[Dict[str, List[str]]]]]: - accept_websocket = False - checked_permissions: Optional[Dict[str, List[str]]] = None - if SESSION_COOKIE_NAME in websocket._cookies: - access_token = websocket._cookies[SESSION_COOKIE_NAME] - if permissions is None: - accept_websocket = True - else: - checked_permissions = {} - for resource, actions in permissions.items(): - allowed = checked_permissions[resource] = [] - for action in actions: - try: - await fief.validate_access_token( - access_token, required_permissions=[f"{resource}:{action}"] - ) - except BaseException: - pass - else: - allowed.append(action) - accept_websocket = True - if accept_websocket: - return websocket, checked_permissions - else: - await websocket.close(code=status.WS_1008_POLICY_VIOLATION) - return None - - return _ - - -def current_user(permissions=None): - if permissions is not None: - permissions = [ - f"{resource}:{action}" - for resource, actions in permissions.items() - for action in actions - ] - - async def _(user: FiefUserInfo = Depends(auth.current_user(permissions=permissions))): - return UserRead(**user["fields"]) + self.fief = FiefAsync( + auth_fief_config.base_url, + auth_fief_config.client_id, + auth_fief_config.client_secret, + ) - return _ + self.SESSION_COOKIE_NAME = "fps_auth_fief_user_session" + scheme = APIKeyCookie(name=self.SESSION_COOKIE_NAME, auto_error=False) + self.auth = CustomFiefAuth(self.fief, scheme) + + async def update_user( + user: FiefUserInfo = Depends(self.auth.current_user()), + access_token_info: FiefAccessTokenInfo = Depends(self.auth.authenticated()), + ): + async def _(data: Dict[str, Any]) -> FiefUserInfo: + user = await self.fief.update_profile( + access_token_info["access_token"], {"fields": data} + ) + return user + + return _ + + def websocket_auth(permissions: Optional[Dict[str, List[str]]] = None): + async def _( + websocket: WebSocket, + ) -> Optional[Tuple[WebSocket, Optional[Dict[str, List[str]]]]]: + accept_websocket = False + checked_permissions: Optional[Dict[str, List[str]]] = None + if self.SESSION_COOKIE_NAME in websocket._cookies: + access_token = websocket._cookies[self.SESSION_COOKIE_NAME] + if permissions is None: + accept_websocket = True + else: + checked_permissions = {} + for resource, actions in permissions.items(): + allowed = checked_permissions[resource] = [] + for action in actions: + try: + await self.fief.validate_access_token( + access_token, required_permissions=[f"{resource}:{action}"] + ) + except BaseException: + pass + else: + allowed.append(action) + accept_websocket = True + if accept_websocket: + return websocket, checked_permissions + else: + await websocket.close(code=status.WS_1008_POLICY_VIOLATION) + return None + + return _ + + def current_user(permissions=None): + if permissions is not None: + permissions = [ + f"{resource}:{action}" + for resource, actions in permissions.items() + for action in actions + ] + + async def _( + user: FiefUserInfo = Depends(self.auth.current_user(permissions=permissions)), + ): + return User(**user["fields"]) + + return _ + + self.current_user = current_user + self.update_user = update_user + self.websocket_auth = websocket_auth diff --git a/plugins/auth_fief/fps_auth_fief/config.py b/plugins/auth_fief/fps_auth_fief/config.py index eb08e190..09978f8b 100644 --- 
a/plugins/auth_fief/fps_auth_fief/config.py +++ b/plugins/auth_fief/fps_auth_fief/config.py @@ -1,23 +1,7 @@ -from fps.config import PluginModel, get_config -from fps.hooks import register_config -from pydantic import BaseSettings, SecretStr +from jupyverse_api.auth import AuthConfig -class AuthFiefConfig(PluginModel, BaseSettings): +class _AuthFiefConfig(AuthConfig): base_url: str # Base URL of Fief tenant client_id: str # ID of Fief client - client_secret: SecretStr # Secret of Fief client - - class Config(PluginModel.Config): - env_prefix = "fps_auth_fief_" - # config can be set with environment variables, e.g.: - # export FPS_AUTH_FIEF_BASE_URL=https://jupyverse.fief.dev - # export FPS_AUTH_FIEF_CLIENT_ID=my_client_id - # export FPS_AUTH_FIEF_CLIENT_SECRET=my_client_secret - - -def get_auth_fief_config(): - return get_config(AuthFiefConfig) - - -c = register_config(AuthFiefConfig) + client_secret: str # Secret of Fief client diff --git a/plugins/auth_fief/fps_auth_fief/main.py b/plugins/auth_fief/fps_auth_fief/main.py new file mode 100644 index 00000000..f688fa54 --- /dev/null +++ b/plugins/auth_fief/fps_auth_fief/main.py @@ -0,0 +1,22 @@ +from asphalt.core import Component, Context +from jupyverse_api.auth import Auth, AuthConfig +from jupyverse_api.app import App + +from .config import _AuthFiefConfig +from .routes import _AuthFief + + +class AuthFiefComponent(Component): + def __init__(self, **kwargs): + self.auth_fief_config = _AuthFiefConfig(**kwargs) + + async def start( + self, + ctx: Context, + ) -> None: + ctx.add_resource(self.auth_fief_config, types=AuthConfig) + + app = await ctx.request_resource(App) + + auth_fief = _AuthFief(app, self.auth_fief_config) + ctx.add_resource(auth_fief, types=Auth) diff --git a/plugins/auth_fief/fps_auth_fief/models.py b/plugins/auth_fief/fps_auth_fief/models.py deleted file mode 100644 index 89786c57..00000000 --- a/plugins/auth_fief/fps_auth_fief/models.py +++ /dev/null @@ -1,18 +0,0 @@ -from typing import Dict, List, Optional - -from pydantic import BaseModel - - -class Permissions(BaseModel): - permissions: Dict[str, List[str]] - - -class UserRead(BaseModel): - username: str = "" - name: str = "" - display_name: str = "" - initials: Optional[str] = None - color: Optional[str] = None - avatar_url: Optional[str] = None - workspace: str = "{}" - settings: str = "{}" diff --git a/plugins/auth_fief/fps_auth_fief/routes.py b/plugins/auth_fief/fps_auth_fief/routes.py index 6d7cbe81..d388c49d 100644 --- a/plugins/auth_fief/fps_auth_fief/routes.py +++ b/plugins/auth_fief/fps_auth_fief/routes.py @@ -4,59 +4,70 @@ from fastapi import APIRouter, Depends, Query, Request, Response from fastapi.responses import RedirectResponse from fief_client import FiefAccessTokenInfo -from fps.hooks import register_router - -from .backend import SESSION_COOKIE_NAME, auth, current_user, fief -from .models import UserRead - -router = APIRouter() - - -@router.get("/auth-callback", name="auth_callback") -async def auth_callback(request: Request, response: Response, code: str = Query(...)): - redirect_uri = request.url_for("auth_callback") - tokens, _ = await fief.auth_callback(code, redirect_uri) - - response = RedirectResponse(request.url_for("root")) - response.set_cookie( - SESSION_COOKIE_NAME, - tokens["access_token"], - max_age=tokens["expires_in"], - httponly=True, - secure=False, - ) - - return response - - -@router.get("/api/me") -async def get_api_me( - request: Request, - user: UserRead = Depends(current_user()), - access_token_info: FiefAccessTokenInfo = 
Depends(auth.authenticated()), -): - checked_permissions: Dict[str, List[str]] = {} - permissions = json.loads(dict(request.query_params).get("permissions", "{}").replace("'", '"')) - if permissions: - user_permissions = {} - for permission in access_token_info["permissions"]: - resource, action = permission.split(":") - if resource not in user_permissions.keys(): - user_permissions[resource] = [] - user_permissions[resource].append(action) - for resource, actions in permissions.items(): - user_resource_permissions = user_permissions.get(resource, []) - allowed = checked_permissions[resource] = [] - for action in actions: - if action in user_resource_permissions: - allowed.append(action) - - keys = ["username", "name", "display_name", "initials", "avatar_url", "color"] - identity = {k: getattr(user, k) for k in keys} - return { - "identity": identity, - "permissions": checked_permissions, - } - - -r = register_router(router) +from jupyverse_api import Router +from jupyverse_api.app import App +from jupyverse_api.auth import Auth, User + +from .backend import Backend +from .config import _AuthFiefConfig + + +class _AuthFief(Backend, Auth, Router): + def __init__( + self, + app: App, + auth_fief_config: _AuthFiefConfig, + ) -> None: + Router.__init__(self, app) + Backend.__init__(self, auth_fief_config) + + router = APIRouter() + + @router.get("/auth-callback", name="auth_callback") + async def auth_callback(request: Request, response: Response, code: str = Query(...)): + redirect_uri = str(request.url_for("auth_callback")) + tokens, _ = await self.fief.auth_callback(code, redirect_uri) + + response = RedirectResponse(request.url_for("root")) + response.set_cookie( + self.SESSION_COOKIE_NAME, + tokens["access_token"], + max_age=tokens["expires_in"], + httponly=True, + secure=False, + ) + + return response + + @router.get("/api/me") + async def get_api_me( + request: Request, + user: User = Depends(self.current_user()), + access_token_info: FiefAccessTokenInfo = Depends(self.auth.authenticated()), + ): + checked_permissions: Dict[str, List[str]] = {} + permissions = json.loads( + dict(request.query_params).get("permissions", "{}").replace("'", '"') + ) + if permissions: + user_permissions: Dict[str, List[str]] = {} + for permission in access_token_info["permissions"]: + resource, action = permission.split(":") + if resource not in user_permissions: + user_permissions[resource] = [] + user_permissions[resource].append(action) + for resource, actions in permissions.items(): + user_resource_permissions = user_permissions.get(resource, []) + allowed = checked_permissions[resource] = [] + for action in actions: + if action in user_resource_permissions: + allowed.append(action) + + keys = ["username", "name", "display_name", "initials", "avatar_url", "color"] + identity = {k: getattr(user, k) for k in keys} + return { + "identity": identity, + "permissions": checked_permissions, + } + + self.include_router(router) diff --git a/plugins/auth_fief/pyproject.toml b/plugins/auth_fief/pyproject.toml index 7ed0d773..60f20834 100644 --- a/plugins/auth_fief/pyproject.toml +++ b/plugins/auth_fief/pyproject.toml @@ -5,12 +5,11 @@ build-backend = "hatchling.build" [project] name = "fps_auth_fief" description = "An FPS plugin for the authentication API, using Fief" -keywords = ["jupyter", "server", "fastapi", "pluggy", "plugins"] +keywords = ["jupyter", "server", "fastapi", "plugins"] dynamic = ["version"] requires-python = ">=3.8" dependencies = [ "fief-client[fastapi]", - "fps >=0.0.8" ] [[project.authors]] 
@@ -33,11 +32,8 @@ ignore = [ ".*",] [tool.jupyter-releaser] skip = [ "check-links",] -[project.entry-points.fps_router] -fps-auth-fief = "fps_auth_fief.routes" - -[project.entry-points.fps_config] -fps-auth-fief = "fps_auth_fief.config" +[project.entry-points."asphalt.components"] +auth_fief = "fps_auth_fief.main:AuthFiefComponent" [project.entry-points.jupyverse_auth] User = "fps_auth_fief.models:UserRead" diff --git a/plugins/contents/fps_contents/fileid.py b/plugins/contents/fps_contents/fileid.py index b2510426..89b20a93 100644 --- a/plugins/contents/fps_contents/fileid.py +++ b/plugins/contents/fps_contents/fileid.py @@ -1,14 +1,14 @@ import asyncio +import logging from typing import Dict, List, Optional from uuid import uuid4 import aiosqlite from anyio import Path -from fps.logging import get_configured_logger # type: ignore +from jupyverse_api import Singleton from watchfiles import Change, awatch -watchfiles_logger = get_configured_logger("watchfiles.main", "warning") -logger = get_configured_logger("contents") +logger = logging.getLogger("contents") class Watcher: @@ -29,121 +29,136 @@ def notify(self, change): self._event.set() -class Singleton(type): - _instances: Dict = {} - - def __call__(cls, *args, **kwargs): - if cls not in cls._instances: - cls._instances[cls] = super(Singleton, cls).__call__(*args, **kwargs) - return cls._instances[cls] - - class FileIdManager(metaclass=Singleton): db_path: str initialized: asyncio.Event watchers: Dict[str, List[Watcher]] + lock: asyncio.Lock def __init__(self, db_path: str = "fileid.db"): self.db_path = db_path self.initialized = asyncio.Event() self.watchers = {} - self._watch_files_task = asyncio.create_task(self.watch_files()) + self.watch_files_task = asyncio.create_task(self.watch_files()) + self.stop_watching_files = asyncio.Event() + self.stopped_watching_files = asyncio.Event() + self.lock = asyncio.Lock() async def get_id(self, path: str) -> Optional[str]: await self.initialized.wait() - async with aiosqlite.connect(self.db_path) as db: - async with db.execute("SELECT id FROM fileids WHERE path = ?", (path,)) as cursor: - async for idx, in cursor: - return idx - return None + async with self.lock: + async with aiosqlite.connect(self.db_path) as db: + async with db.execute("SELECT id FROM fileids WHERE path = ?", (path,)) as cursor: + async for idx, in cursor: + return idx + return None async def get_path(self, idx: str) -> Optional[str]: await self.initialized.wait() - async with aiosqlite.connect(self.db_path) as db: - async with db.execute("SELECT path FROM fileids WHERE id = ?", (idx,)) as cursor: - async for path, in cursor: - return path - return None + async with self.lock: + async with aiosqlite.connect(self.db_path) as db: + async with db.execute("SELECT path FROM fileids WHERE id = ?", (idx,)) as cursor: + async for path, in cursor: + return path + return None async def index(self, path: str) -> Optional[str]: await self.initialized.wait() - async with aiosqlite.connect(self.db_path) as db: - apath = Path(path) - if not await apath.exists(): - return None + async with self.lock: + async with aiosqlite.connect(self.db_path) as db: + apath = Path(path) + if not await apath.exists(): + return None - idx = uuid4().hex - mtime = (await apath.stat()).st_mtime - await db.execute("INSERT INTO fileids VALUES (?, ?, ?)", (idx, path, mtime)) - await db.commit() - return idx + idx = uuid4().hex + mtime = (await apath.stat()).st_mtime + await db.execute("INSERT INTO fileids VALUES (?, ?, ?)", (idx, path, mtime)) + await 
db.commit() + return idx async def watch_files(self): - async with aiosqlite.connect(self.db_path) as db: - await db.execute("DROP TABLE IF EXISTS fileids") - await db.execute( - "CREATE TABLE fileids " - "(id TEXT PRIMARY KEY, path TEXT NOT NULL UNIQUE, mtime REAL NOT NULL)" - ) - await db.commit() + async with self.lock: + async with aiosqlite.connect(self.db_path) as db: + await db.execute("DROP TABLE IF EXISTS fileids") + await db.execute( + "CREATE TABLE fileids " + "(id TEXT PRIMARY KEY, path TEXT NOT NULL UNIQUE, mtime REAL NOT NULL)" + ) + await db.commit() # index files - async with aiosqlite.connect(self.db_path) as db: - async for path in Path().rglob("*"): - idx = uuid4().hex - mtime = (await path.stat()).st_mtime - await db.execute("INSERT INTO fileids VALUES (?, ?, ?)", (idx, str(path), mtime)) - await db.commit() - self.initialized.set() - - async for changes in awatch("."): - deleted_paths = [] - added_paths = [] - for change, changed_path in changes: - # get relative path - changed_path = Path(changed_path).relative_to(await Path().absolute()) - changed_path_str = str(changed_path) - - if change == Change.deleted: - logger.debug("File %s was deleted", changed_path_str) - async with db.execute( - "SELECT COUNT(*) FROM fileids WHERE path = ?", (changed_path_str,) - ) as cursor: - if not (await cursor.fetchone())[0]: - # path is not indexed, ignore - logger.debug("File %s is not indexed, ignoring", changed_path_str) - continue - # path is indexed - await maybe_rename(db, changed_path_str, deleted_paths, added_paths, False) - elif change == Change.added: - logger.debug("File %s was added", changed_path_str) - await maybe_rename(db, changed_path_str, added_paths, deleted_paths, True) - elif change == Change.modified: - logger.debug("File %s was modified", changed_path_str) - if changed_path_str == self.db_path: - continue - async with db.execute( - "SELECT COUNT(*) FROM fileids WHERE path = ?", (changed_path_str,) - ) as cursor: - if not (await cursor.fetchone())[0]: - # path is not indexed, ignore - logger.debug("File %s is not indexed, ignoring", changed_path_str) - continue - mtime = (await changed_path.stat()).st_mtime - await db.execute( - "UPDATE fileids SET mtime = ? 
WHERE path = ?", (mtime, changed_path_str) - ) - - for path in deleted_paths + added_paths: - await db.execute("DELETE FROM fileids WHERE path = ?", (path,)) + async with self.lock: + async with aiosqlite.connect(self.db_path) as db: + async for path in Path().rglob("*"): + idx = uuid4().hex + mtime = (await path.stat()).st_mtime + await db.execute( + "INSERT INTO fileids VALUES (?, ?, ?)", (idx, str(path), mtime) + ) await db.commit() - - for change in changes: - changed_path = change[1] - # get relative path - changed_path = str(Path(changed_path).relative_to(await Path().absolute())) - for watcher in self.watchers.get(changed_path, []): - watcher.notify(change) + self.initialized.set() + + async for changes in awatch(".", stop_event=self.stop_watching_files): + async with self.lock: + async with aiosqlite.connect(self.db_path) as db: + deleted_paths = [] + added_paths = [] + for change, changed_path in changes: + # get relative path + changed_path = Path(changed_path).relative_to(await Path().absolute()) + changed_path_str = str(changed_path) + + if change == Change.deleted: + logger.debug("File %s was deleted", changed_path_str) + async with db.execute( + "SELECT COUNT(*) FROM fileids WHERE path = ?", (changed_path_str,) + ) as cursor: + if not (await cursor.fetchone())[0]: + # path is not indexed, ignore + logger.debug( + "File %s is not indexed, ignoring", changed_path_str + ) + continue + # path is indexed + await maybe_rename( + db, changed_path_str, deleted_paths, added_paths, False + ) + elif change == Change.added: + logger.debug("File %s was added", changed_path_str) + await maybe_rename( + db, changed_path_str, added_paths, deleted_paths, True + ) + elif change == Change.modified: + logger.debug("File %s was modified", changed_path_str) + if changed_path_str == self.db_path: + continue + async with db.execute( + "SELECT COUNT(*) FROM fileids WHERE path = ?", (changed_path_str,) + ) as cursor: + if not (await cursor.fetchone())[0]: + # path is not indexed, ignore + logger.debug( + "File %s is not indexed, ignoring", changed_path_str + ) + continue + mtime = (await changed_path.stat()).st_mtime + await db.execute( + "UPDATE fileids SET mtime = ? 
WHERE path = ?", + (mtime, changed_path_str), + ) + + for path in deleted_paths + added_paths: + await db.execute("DELETE FROM fileids WHERE path = ?", (path,)) + await db.commit() + + for change in changes: + changed_path = change[1] + # get relative path + changed_path = str(Path(changed_path).relative_to(await Path().absolute())) + for watcher in self.watchers.get(changed_path, []): + watcher.notify(change) + + self.stopped_watching_files.set() def watch(self, path: str) -> Watcher: watcher = Watcher(path) diff --git a/plugins/contents/fps_contents/main.py b/plugins/contents/fps_contents/main.py new file mode 100644 index 00000000..36dc5cfe --- /dev/null +++ b/plugins/contents/fps_contents/main.py @@ -0,0 +1,18 @@ +from asphalt.core import Component, Context +from jupyverse_api.app import App +from jupyverse_api.auth import Auth +from jupyverse_api.contents import Contents + +from .routes import _Contents + + +class ContentsComponent(Component): + async def start( + self, + ctx: Context, + ) -> None: + app = await ctx.request_resource(App) + auth = await ctx.request_resource(Auth) # type: ignore + + contents = _Contents(app, auth) + ctx.add_resource(contents, types=Contents) diff --git a/plugins/contents/fps_contents/models.py b/plugins/contents/fps_contents/models.py index fbedbece..5d628cc0 100644 --- a/plugins/contents/fps_contents/models.py +++ b/plugins/contents/fps_contents/models.py @@ -1,4 +1,4 @@ -from typing import Dict, List, Optional, Union +from typing import Optional from pydantic import BaseModel @@ -8,13 +8,6 @@ class Checkpoint(BaseModel): last_modified: str -class SaveContent(BaseModel): - content: Optional[Union[str, Dict]] - format: str - path: str - type: str - - class CreateContent(BaseModel): ext: Optional[str] path: str @@ -23,16 +16,3 @@ class CreateContent(BaseModel): class RenameContent(BaseModel): path: str - - -class Content(BaseModel): - name: str - path: str - last_modified: Optional[str] - created: Optional[str] - content: Optional[Union[str, Dict, List[Dict]]] - format: Optional[str] - mimetype: Optional[str] - size: Optional[int] - writable: bool - type: str diff --git a/plugins/contents/fps_contents/routes.py b/plugins/contents/fps_contents/routes.py index cbbdea40..11304dc0 100644 --- a/plugins/contents/fps_contents/routes.py +++ b/plugins/contents/fps_contents/routes.py @@ -8,141 +8,258 @@ from anyio import open_file from fastapi import APIRouter, Depends, HTTPException, Response -from fps.hooks import register_router # type: ignore -from fps_auth_base import User, current_user # type: ignore -from starlette.requests import Request # type: ignore - -from .models import Checkpoint, Content, CreateContent, RenameContent, SaveContent - -router = APIRouter() - - -@router.post( - "/api/contents/{path:path}/checkpoints", - status_code=201, -) -async def create_checkpoint( - path, user: User = Depends(current_user(permissions={"contents": ["write"]})) -): - src_path = Path(path) - dst_path = Path(".ipynb_checkpoints") / f"{src_path.stem}-checkpoint{src_path.suffix}" - try: - dst_path.parent.mkdir(exist_ok=True) - shutil.copyfile(src_path, dst_path) - except Exception: - # FIXME: return error code? 
- return [] - mtime = get_file_modification_time(dst_path) - return Checkpoint(**{"id": "checkpoint", "last_modified": mtime}) - - -@router.post( - "/api/contents{path:path}", - status_code=201, -) -async def create_content( - path: Optional[str], - request: Request, - user: User = Depends(current_user(permissions={"contents": ["write"]})), -): - create_content = CreateContent(**(await request.json())) - content_path = Path(create_content.path) - if create_content.type == "notebook": - available_path = get_available_path(content_path / "Untitled.ipynb") - async with await open_file(available_path, "w") as f: - await f.write( - json.dumps({"cells": [], "metadata": {}, "nbformat": 4, "nbformat_minor": 5}) - ) - src_path = available_path - dst_path = Path(".ipynb_checkpoints") / f"{src_path.stem}-checkpoint{src_path.suffix}" - try: - dst_path.parent.mkdir(exist_ok=True) - shutil.copyfile(src_path, dst_path) - except Exception: - # FIXME: return error code? - pass - elif create_content.type == "directory": - name = "Untitled Folder" - available_path = get_available_path(content_path / name, sep=" ") - available_path.mkdir(parents=True, exist_ok=True) - else: - assert create_content.ext is not None - available_path = get_available_path(content_path / ("untitled" + create_content.ext)) - open(available_path, "w").close() - - return await read_content(available_path, False) - - -@router.get("/api/contents") -async def get_root_content( - content: int, - user: User = Depends(current_user(permissions={"contents": ["read"]})), -): - return await read_content("", bool(content)) - - -@router.get("/api/contents/{path:path}/checkpoints") -async def get_checkpoint( - path, user: User = Depends(current_user(permissions={"contents": ["read"]})) -): - src_path = Path(path) - dst_path = Path(".ipynb_checkpoints") / f"{src_path.stem}-checkpoint{src_path.suffix}" - if not dst_path.exists(): - return [] - mtime = get_file_modification_time(dst_path) - return [Checkpoint(**{"id": "checkpoint", "last_modified": mtime})] - - -@router.get("/api/contents/{path:path}") -async def get_content( - path: str, - content: int = 0, - user: User = Depends(current_user(permissions={"contents": ["read"]})), -): - return await read_content(path, bool(content)) - - -@router.put("/api/contents/{path:path}") -async def save_content( - path, - request: Request, - response: Response, - user: User = Depends(current_user(permissions={"contents": ["write"]})), -): - content = SaveContent(**(await request.json())) - try: - await write_content(content) - except Exception: - raise HTTPException(status_code=404, detail=f"Error saving {content.path}") - return await read_content(content.path, False) - - -@router.delete( - "/api/contents/{path:path}", - status_code=204, -) -async def delete_content( - path, - user: User = Depends(current_user(permissions={"contents": ["write"]})), -): - p = Path(path) - if p.exists(): - if p.is_dir(): - shutil.rmtree(p) +from jupyverse_api.app import App +from jupyverse_api.auth import Auth, User +from jupyverse_api.contents import Contents, Content, SaveContent +from starlette.requests import Request + +from .fileid import FileIdManager +from .models import Checkpoint, CreateContent, RenameContent + + +class _Contents(Contents): + def __init__(self, app: App, auth: Auth): + super().__init__(app=app) + + router = APIRouter() + + @router.post( + "/api/contents/{path:path}/checkpoints", + status_code=201, + ) + async def create_checkpoint( + path, user: User = Depends(auth.current_user(permissions={"contents": 
["write"]})) + ): + src_path = Path(path) + dst_path = Path(".ipynb_checkpoints") / f"{src_path.stem}-checkpoint{src_path.suffix}" + try: + dst_path.parent.mkdir(exist_ok=True) + shutil.copyfile(src_path, dst_path) + except Exception: + # FIXME: return error code? + return [] + mtime = get_file_modification_time(dst_path) + return Checkpoint(**{"id": "checkpoint", "last_modified": mtime}) + + @router.post( + "/api/contents{path:path}", + status_code=201, + ) + async def create_content( + path: Optional[str], + request: Request, + user: User = Depends(auth.current_user(permissions={"contents": ["write"]})), + ): + create_content = CreateContent(**(await request.json())) + content_path = Path(create_content.path) + if create_content.type == "notebook": + available_path = get_available_path(content_path / "Untitled.ipynb") + async with await open_file(available_path, "w") as f: + await f.write( + json.dumps( + {"cells": [], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} + ) + ) + src_path = available_path + dst_path = ( + Path(".ipynb_checkpoints") / f"{src_path.stem}-checkpoint{src_path.suffix}" + ) + try: + dst_path.parent.mkdir(exist_ok=True) + shutil.copyfile(src_path, dst_path) + except Exception: + # FIXME: return error code? + pass + elif create_content.type == "directory": + name = "Untitled Folder" + available_path = get_available_path(content_path / name, sep=" ") + available_path.mkdir(parents=True, exist_ok=True) + else: + assert create_content.ext is not None + available_path = get_available_path( + content_path / ("untitled" + create_content.ext) + ) + open(available_path, "w").close() + + return await self.read_content(available_path, False) + + @router.get("/api/contents") + async def get_root_content( + content: int, + user: User = Depends(auth.current_user(permissions={"contents": ["read"]})), + ): + return await self.read_content("", bool(content)) + + @router.get("/api/contents/{path:path}/checkpoints") + async def get_checkpoint( + path, user: User = Depends(auth.current_user(permissions={"contents": ["read"]})) + ): + src_path = Path(path) + dst_path = Path(".ipynb_checkpoints") / f"{src_path.stem}-checkpoint{src_path.suffix}" + if not dst_path.exists(): + return [] + mtime = get_file_modification_time(dst_path) + return [Checkpoint(**{"id": "checkpoint", "last_modified": mtime})] + + @router.get("/api/contents/{path:path}") + async def get_content( + path: str, + content: int = 0, + user: User = Depends(auth.current_user(permissions={"contents": ["read"]})), + ): + return await self.read_content(path, bool(content)) + + @router.put("/api/contents/{path:path}") + async def save_content( + path, + request: Request, + response: Response, + user: User = Depends(auth.current_user(permissions={"contents": ["write"]})), + ): + content = SaveContent(**(await request.json())) + try: + await self.write_content(content) + except Exception: + raise HTTPException(status_code=404, detail=f"Error saving {content.path}") + return await self.read_content(content.path, False) + + @router.delete( + "/api/contents/{path:path}", + status_code=204, + ) + async def delete_content( + path, + user: User = Depends(auth.current_user(permissions={"contents": ["write"]})), + ): + p = Path(path) + if p.exists(): + if p.is_dir(): + shutil.rmtree(p) + else: + p.unlink() + return Response(status_code=HTTPStatus.NO_CONTENT.value) + + @router.patch("/api/contents/{path:path}") + async def rename_content( + path, + request: Request, + user: User = Depends(auth.current_user(permissions={"contents": 
["write"]})), + ): + rename_content = RenameContent(**(await request.json())) + Path(path).rename(rename_content.path) + return await self.read_content(rename_content.path, False) + + self.include_router(router) + + async def read_content( + self, path: Union[str, Path], get_content: bool, as_json: bool = False + ) -> Content: + if isinstance(path, str): + path = Path(path) + content: Optional[Union[str, Dict, List[Dict]]] = None + if get_content: + if path.is_dir(): + content = [ + (await self.read_content(subpath, get_content=False)).dict() + for subpath in path.iterdir() + if not subpath.name.startswith(".") + ] + elif path.is_file() or path.is_symlink(): + try: + async with await open_file(path) as f: + content = await f.read() + if as_json: + content = json.loads(content) + except Exception: + raise HTTPException(status_code=404, detail="Item not found") + format: Optional[str] = None + if path.is_dir(): + size = None + type = "directory" + format = "json" + mimetype = None + elif path.is_file() or path.is_symlink(): + size = get_file_size(path) + if path.suffix == ".ipynb": + type = "notebook" + format = None + mimetype = None + if content is not None: + nb: dict + if as_json: + content = cast(Dict, content) + nb = content + else: + content = cast(str, content) + nb = json.loads(content) + for cell in nb["cells"]: + if "metadata" not in cell: + cell["metadata"] = {} + cell["metadata"].update({"trusted": False}) + if not as_json: + content = json.dumps(nb) + elif path.suffix == ".json": + type = "json" + format = "text" + mimetype = "application/json" + else: + type = "file" + format = None + mimetype = "text/plain" else: - p.unlink() - return Response(status_code=HTTPStatus.NO_CONTENT.value) + raise HTTPException(status_code=404, detail="Item not found") + + return Content( + **{ + "name": path.name, + "path": path.as_posix(), + "last_modified": get_file_modification_time(path), + "created": get_file_creation_time(path), + "content": content, + "format": format, + "mimetype": mimetype, + "size": size, + "writable": is_file_writable(path), + "type": type, + } + ) + + async def write_content(self, content: Union[SaveContent, Dict]) -> None: + if not isinstance(content, SaveContent): + content = SaveContent(**content) + async with await open_file(content.path, "w") as f: + if content.format == "json": + dict_content = cast(Dict, content.content) + if content.type == "notebook": + # see https://github.com/jupyterlab/jupyterlab/issues/11005 + if "metadata" in dict_content and "orig_nbformat" in dict_content["metadata"]: + del dict_content["metadata"]["orig_nbformat"] + await f.write(json.dumps(dict_content, indent=2)) + else: + content.content = cast(str, content.content) + await f.write(content.content) + + @property + def file_id_manager(self): + return FileIdManager() -@router.patch("/api/contents/{path:path}") -async def rename_content( - path, - request: Request, - user: User = Depends(current_user(permissions={"contents": ["write"]})), -): - rename_content = RenameContent(**(await request.json())) - Path(path).rename(rename_content.path) - return await read_content(rename_content.path, False) +def get_available_path(path: Path, sep: str = ""): + directory = path.parent + name = Path(path.name) + i = None + while True: + if i is None: + i_str = "" + i = 1 + else: + i_str = str(i) + i += 1 + if i_str: + i_str = sep + i_str + available_path = directory / (name.stem + i_str + name.suffix) + if not available_path.exists(): + return available_path def get_file_modification_time(path: 
Path): @@ -169,112 +286,3 @@ def is_file_writable(path: Path) -> bool: else: return os.access(path, os.W_OK) return False - - -async def read_content(path: Union[str, Path], get_content: bool, as_json: bool = False) -> Content: - if isinstance(path, str): - path = Path(path) - content: Optional[Union[str, Dict, List[Dict]]] = None - if get_content: - if path.is_dir(): - content = [ - (await read_content(subpath, get_content=False)).dict() - for subpath in path.iterdir() - if not subpath.name.startswith(".") - ] - elif path.is_file() or path.is_symlink(): - try: - async with await open_file(path) as f: - content = await f.read() - if as_json: - content = json.loads(content) - except Exception: - raise HTTPException(status_code=404, detail="Item not found") - format: Optional[str] = None - if path.is_dir(): - size = None - type = "directory" - format = "json" - mimetype = None - elif path.is_file() or path.is_symlink(): - size = get_file_size(path) - if path.suffix == ".ipynb": - type = "notebook" - format = None - mimetype = None - if content is not None: - nb: dict - if as_json: - content = cast(Dict, content) - nb = content - else: - content = cast(str, content) - nb = json.loads(content) - for cell in nb["cells"]: - if "metadata" not in cell: - cell["metadata"] = {} - cell["metadata"].update({"trusted": False}) - if not as_json: - content = json.dumps(nb) - elif path.suffix == ".json": - type = "json" - format = "text" - mimetype = "application/json" - else: - type = "file" - format = None - mimetype = "text/plain" - else: - raise HTTPException(status_code=404, detail="Item not found") - - return Content( - **{ - "name": path.name, - "path": path.as_posix(), - "last_modified": get_file_modification_time(path), - "created": get_file_creation_time(path), - "content": content, - "format": format, - "mimetype": mimetype, - "size": size, - "writable": is_file_writable(path), - "type": type, - } - ) - - -async def write_content(content: Union[SaveContent, Dict]) -> None: - if not isinstance(content, SaveContent): - content = SaveContent(**content) - async with await open_file(content.path, "w") as f: - if content.format == "json": - dict_content = cast(Dict, content.content) - if content.type == "notebook": - # see https://github.com/jupyterlab/jupyterlab/issues/11005 - if "metadata" in dict_content and "orig_nbformat" in dict_content["metadata"]: - del dict_content["metadata"]["orig_nbformat"] - await f.write(json.dumps(dict_content, indent=2)) - else: - content.content = cast(str, content.content) - await f.write(content.content) - - -def get_available_path(path: Path, sep: str = ""): - directory = path.parent - name = Path(path.name) - i = None - while True: - if i is None: - i_str = "" - i = 1 - else: - i_str = str(i) - i += 1 - if i_str: - i_str = sep + i_str - available_path = directory / (name.stem + i_str + name.suffix) - if not available_path.exists(): - return available_path - - -r = register_router(router) diff --git a/plugins/contents/pyproject.toml b/plugins/contents/pyproject.toml index 6251e8aa..f78ee434 100644 --- a/plugins/contents/pyproject.toml +++ b/plugins/contents/pyproject.toml @@ -5,9 +5,13 @@ build-backend = "hatchling.build" [project] name = "fps_contents" description = "An FPS plugin for the contents API" -keywords = [ "jupyter", "server", "fastapi", "pluggy", "plugins",] +keywords = ["jupyter", "server", "fastapi", "plugins"] requires-python = ">=3.8" -dependencies = [ "fps >=0.0.8", "fps-auth-base", "anyio", "watchfiles >=0.16.1,<1", "aiosqlite >=0.17.0,<1", 
"anyio>=3.6.2,<4"] +dependencies = [ + "watchfiles >=0.18.1,<1", + "aiosqlite >=0.17.0,<1", + "anyio>=3.6.2,<4", +] dynamic = [ "version",] [[project.authors]] name = "Jupyter Development Team" @@ -29,8 +33,8 @@ ignore = [ ".*",] [tool.jupyter-releaser] skip = [ "check-links",] -[project.entry-points.fps_router] -fps-contents = "fps_contents.routes" +[project.entry-points."asphalt.components"] +contents = "fps_contents.main:ContentsComponent" [tool.hatch.version] path = "fps_contents/__init__.py" diff --git a/plugins/frontend/fps_frontend/config.py b/plugins/frontend/fps_frontend/config.py deleted file mode 100644 index 1ee422e9..00000000 --- a/plugins/frontend/fps_frontend/config.py +++ /dev/null @@ -1,13 +0,0 @@ -from fps.config import PluginModel, get_config # type: ignore -from fps.hooks import register_config # type: ignore - - -class FrontendConfig(PluginModel): - base_url: str = "/" - - -def get_frontend_config(): - return get_config(FrontendConfig) - - -c = register_config(FrontendConfig) diff --git a/plugins/frontend/fps_frontend/main.py b/plugins/frontend/fps_frontend/main.py new file mode 100644 index 00000000..1ff637a5 --- /dev/null +++ b/plugins/frontend/fps_frontend/main.py @@ -0,0 +1,13 @@ +from asphalt.core import Component, Context +from jupyverse_api.frontend import FrontendConfig + + +class FrontendComponent(Component): + def __init__(self, **kwargs): + self.frontend_config = FrontendConfig(**kwargs) + + async def start( + self, + ctx: Context, + ) -> None: + ctx.add_resource(self.frontend_config, types=FrontendConfig) diff --git a/plugins/frontend/fps_frontend/py.typed b/plugins/frontend/fps_frontend/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/plugins/frontend/pyproject.toml b/plugins/frontend/pyproject.toml index 04d8965d..ac50f827 100644 --- a/plugins/frontend/pyproject.toml +++ b/plugins/frontend/pyproject.toml @@ -5,9 +5,8 @@ build-backend = "hatchling.build" [project] name = "fps_frontend" description = "An FPS plugin for the frontend related configuration" -keywords = ["fastapi", "pluggy", "plugins", "fps"] +keywords = ["jupyter", "server", "fastapi", "plugins"] requires-python = ">=3.8" -dependencies = ["fps>=0.0.8"] dynamic = ["version"] [[project.authors]] @@ -30,8 +29,8 @@ ignore = [".*"] [tool.jupyter-releaser] skip = ["check-links"] -[project.entry-points.fps_config] -fps-frontend = "fps_frontend.config" +[project.entry-points."asphalt.components"] +frontend = "fps_frontend.main:FrontendComponent" [tool.hatch.version] path = "fps_frontend/__init__.py" diff --git a/plugins/jupyterlab/fps_jupyterlab/config.py b/plugins/jupyterlab/fps_jupyterlab/config.py deleted file mode 100644 index 73b57ec4..00000000 --- a/plugins/jupyterlab/fps_jupyterlab/config.py +++ /dev/null @@ -1,13 +0,0 @@ -from fps.config import PluginModel, get_config # type: ignore -from fps.hooks import register_config # type: ignore - - -class JupyterLabConfig(PluginModel): - dev_mode: bool = False - - -def get_jlab_config(): - return get_config(JupyterLabConfig) - - -c = register_config(JupyterLabConfig) diff --git a/plugins/jupyterlab/fps_jupyterlab/index.py b/plugins/jupyterlab/fps_jupyterlab/index.py new file mode 100644 index 00000000..d25f1631 --- /dev/null +++ b/plugins/jupyterlab/fps_jupyterlab/index.py @@ -0,0 +1,32 @@ +INDEX_HTML = """\ +JupyterLab + + +VENDORS_NODE_MODULES + + +""" diff --git a/plugins/jupyterlab/fps_jupyterlab/main.py b/plugins/jupyterlab/fps_jupyterlab/main.py new file mode 100644 index 00000000..ec2fd86f --- /dev/null +++ 
b/plugins/jupyterlab/fps_jupyterlab/main.py @@ -0,0 +1,27 @@ +from asphalt.core import Component, Context +from jupyverse_api.app import App +from jupyverse_api.auth import Auth +from jupyverse_api.frontend import FrontendConfig +from jupyverse_api.jupyterlab import JupyterLab, JupyterLabConfig +from jupyverse_api.lab import Lab + +from .routes import _JupyterLab + + +class JupyterLabComponent(Component): + def __init__(self, **kwargs): + self.jupyterlab_config = JupyterLabConfig(**kwargs) + + async def start( + self, + ctx: Context, + ) -> None: + ctx.add_resource(self.jupyterlab_config, types=JupyterLabConfig) + + app = await ctx.request_resource(App) + auth = await ctx.request_resource(Auth) # type: ignore + frontend_config = await ctx.request_resource(FrontendConfig) + lab = await ctx.request_resource(Lab) # type: ignore + + jupyterlab = _JupyterLab(app, self.jupyterlab_config, auth, frontend_config, lab) + ctx.add_resource(jupyterlab, types=JupyterLab) diff --git a/plugins/jupyterlab/fps_jupyterlab/py.typed b/plugins/jupyterlab/fps_jupyterlab/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/plugins/jupyterlab/fps_jupyterlab/routes.py b/plugins/jupyterlab/fps_jupyterlab/routes.py index 7a2b9887..c49fa8e9 100644 --- a/plugins/jupyterlab/fps_jupyterlab/routes.py +++ b/plugins/jupyterlab/fps_jupyterlab/routes.py @@ -2,206 +2,176 @@ from http import HTTPStatus from pathlib import Path -import jupyterlab # type: ignore +import jupyterlab as jupyterlab_module # type: ignore from fastapi import APIRouter, Depends, Response from fastapi.responses import HTMLResponse from fastapi.staticfiles import StaticFiles -from fps.hooks import register_router # type: ignore -from fps_auth_base import User, current_user, update_user # type: ignore -from fps_frontend.config import get_frontend_config # type: ignore -from fps_lab.config import get_lab_config # type: ignore -from fps_lab.routes import init_router # type: ignore -from fps_lab.utils import get_federated_extensions # type: ignore -from starlette.requests import Request # type: ignore - -from .config import get_jlab_config - -router = APIRouter() -prefix_dir, federated_extensions = init_router(router, "lab") -jupyterlab_dir = Path(jupyterlab.__file__).parents[1] - -config = get_jlab_config() -if config.dev_mode: - static_lab_dir = jupyterlab_dir / "dev_mode" / "static" -else: - static_lab_dir = prefix_dir / "share" / "jupyter" / "lab" / "static" - -router.mount( - "/static/lab", - StaticFiles(directory=static_lab_dir), - name="static", -) - - -@router.get("/lab") -async def get_lab( - user: User = Depends(current_user()), - frontend_config=Depends(get_frontend_config), - lab_config=Depends(get_lab_config), -): - return HTMLResponse( - get_index( - "default", - lab_config.collaborative, - config.dev_mode, - frontend_config.base_url, +from jupyverse_api.app import App +from jupyverse_api.auth import Auth, User +from jupyverse_api.frontend import FrontendConfig +from jupyverse_api.jupyterlab import JupyterLab, JupyterLabConfig +from jupyverse_api.lab import Lab +from starlette.requests import Request + +from .index import INDEX_HTML + + +class _JupyterLab(JupyterLab): + def __init__( + self, + app: App, + jupyterlab_config: JupyterLabConfig, + auth: Auth, + frontend_config: FrontendConfig, + lab: Lab, + ) -> None: + super().__init__(app) + + router = APIRouter() + self.prefix_dir, federated_extensions = lab.init_router(router, "lab") + extensions_dir = self.prefix_dir / "share" / "jupyter" / "labextensions" + 
self.federated_extensions, self.disabled_extension = lab.get_federated_extensions( + extensions_dir ) - ) + jupyterlab_dir = Path(jupyterlab_module.__file__).parents[1] + if jupyterlab_config.dev_mode: + self.static_lab_dir = jupyterlab_dir / "dev_mode" / "static" + else: + self.static_lab_dir = self.prefix_dir / "share" / "jupyter" / "lab" / "static" -@router.get("/lab/tree/{path:path}") -async def load_workspace( - path, - frontend_config=Depends(get_frontend_config), - lab_config=Depends(get_lab_config), -): - return HTMLResponse( - get_index( - "default", - lab_config.collaborative, - config.dev_mode, - frontend_config.base_url, + self.mount( + "/static/lab", + StaticFiles(directory=self.static_lab_dir), + name="static", ) - ) - - -@router.get("/lab/api/workspaces/{name}") -async def get_workspace_data(user: User = Depends(current_user())): - if user: - return json.loads(user.workspace) - return {} - - -@router.put( - "/lab/api/workspaces/{name}", - status_code=204, -) -async def set_workspace( - request: Request, - user: User = Depends(current_user()), - user_update=Depends(update_user), -): - workspace = (await request.body()).decode("utf-8") - await user_update({"workspace": workspace}) - return Response(status_code=HTTPStatus.NO_CONTENT.value) - - -@router.get("/lab/workspaces/{name}", response_class=HTMLResponse) -async def get_workspace( - name, - user: User = Depends(current_user()), - frontend_config=Depends(get_frontend_config), - lab_config=Depends(get_lab_config), -): - return get_index(name, lab_config.collaborative, config.dev_mode, frontend_config.base_url) - -INDEX_HTML = """\ -JupyterLab - - -VENDORS_NODE_MODULES - - -""" - - -def get_index(workspace, collaborative, dev_mode, base_url="/"): - for path in (static_lab_dir).glob("main.*.js"): - main_id = path.name.split(".")[1] - break - vendor_id = None - for path in (static_lab_dir).glob("vendors-node_modules_whatwg-fetch_fetch_js.*.js"): - vendor_id = path.name.split(".")[1] - break - full_static_url = f"{base_url}static/lab" - extensions_dir = prefix_dir / "share" / "jupyter" / "labextensions" - federated_extensions, disabled_extension = get_federated_extensions(extensions_dir) - - page_config = { - "appName": "JupyterLab", - "appNamespace": "lab", - "appUrl": "/lab", - "appVersion": jupyterlab.__version__, - "baseUrl": base_url, - "cacheFiles": False, - "collaborative": collaborative, - "devMode": dev_mode, - "disabledExtensions": disabled_extension, - "exposeAppInBrowser": False, - "extraLabextensionsPath": [], - "federated_extensions": federated_extensions, - "fullAppUrl": f"{base_url}lab", - "fullLabextensionsUrl": f"{base_url}lab/extensions", - "fullLicensesUrl": f"{base_url}lab/api/licenses", - "fullListingsUrl": f"{base_url}lab/api/listings", - "fullMathjaxUrl": f"{base_url}static/notebook/components/MathJax/MathJax.js", - "fullSettingsUrl": f"{base_url}lab/api/settings", - "fullStaticUrl": full_static_url, - "fullThemesUrl": f"{base_url}lab/api/themes", - "fullTranslationsApiUrl": f"{base_url}lab/api/translations", - "fullTreeUrl": f"{base_url}lab/tree", - "fullWorkspacesApiUrl": f"{base_url}lab/api/workspaces", - "ignorePlugins": [], - "labextensionsUrl": "/lab/extensions", - "licensesUrl": "/lab/api/licenses", - "listingsUrl": "/lab/api/listings", - "mathjaxConfig": "TeX-AMS-MML_HTMLorMML-full,Safe", - "mode": "multiple-document", - "notebookVersion": "[1, 9, 0]", - "quitButton": True, - "settingsUrl": "/lab/api/settings", - "store_id": 0, - "schemasDir": (prefix_dir / "share" / "jupyter" / "lab" / 
"schemas").as_posix(), - "terminalsAvailable": True, - "themesDir": (prefix_dir / "share" / "jupyter" / "lab" / "themes").as_posix(), - "themesUrl": "/lab/api/themes", - "token": "4e2804532de366abc81e32ab0c6bf68a73716fafbdbb2098", - "translationsApiUrl": "/lab/api/translations", - "treePath": "", - "workspace": workspace, - "treeUrl": "/lab/tree", - "workspacesApiUrl": "/lab/api/workspaces", - "wsUrl": "", - } - index = ( - INDEX_HTML.replace("PAGE_CONFIG", json.dumps(page_config)) - .replace("FULL_STATIC_URL", full_static_url) - .replace("MAIN_ID", main_id) - ) - if vendor_id: - index = index.replace( - "VENDORS_NODE_MODULES", - '', + @router.get("/lab") + async def get_lab( + user: User = Depends(auth.current_user()), + ): + return HTMLResponse( + self.get_index( + "default", + frontend_config.collaborative, + jupyterlab_config.dev_mode, + frontend_config.base_url, + ) + ) + + @router.get("/lab/tree/{path:path}") + async def load_workspace( + path, + ): + return HTMLResponse( + self.get_index( + "default", + frontend_config.collaborative, + jupyterlab_config.dev_mode, + frontend_config.base_url, + ) + ) + + @router.get("/lab/api/workspaces/{name}") + async def get_workspace_data(user: User = Depends(auth.current_user())): + if user: + return json.loads(user.workspace) + return {} + + @router.put( + "/lab/api/workspaces/{name}", + status_code=204, ) - else: - index = index.replace("VENDORS_NODE_MODULES", "") - return index - - -r = register_router(router) + async def set_workspace( + request: Request, + user: User = Depends(auth.current_user()), + user_update=Depends(auth.update_user), + ): + workspace = (await request.body()).decode("utf-8") + await user_update({"workspace": workspace}) + return Response(status_code=HTTPStatus.NO_CONTENT.value) + + @router.get("/lab/workspaces/{name}", response_class=HTMLResponse) + async def get_workspace( + name, + user: User = Depends(auth.current_user()), + ): + return self.get_index( + name, + frontend_config.collaborative, + jupyterlab_config.dev_mode, + frontend_config.base_url, + ) + + self.include_router(router) + + def get_index(self, workspace, collaborative, dev_mode, base_url="/"): + for path in (self.static_lab_dir).glob("main.*.js"): + main_id = path.name.split(".")[1] + break + vendor_id = None + for path in (self.static_lab_dir).glob("vendors-node_modules_whatwg-fetch_fetch_js.*.js"): + vendor_id = path.name.split(".")[1] + break + full_static_url = f"{base_url}static/lab" + + page_config = { + "appName": "JupyterLab", + "appNamespace": "lab", + "appUrl": "/lab", + "appVersion": jupyterlab_module.__version__, + "baseUrl": base_url, + "cacheFiles": False, + "collaborative": collaborative, + "devMode": dev_mode, + "disabledExtensions": self.disabled_extension, + "exposeAppInBrowser": False, + "extraLabextensionsPath": [], + "federated_extensions": self.federated_extensions, + "fullAppUrl": f"{base_url}lab", + "fullLabextensionsUrl": f"{base_url}lab/extensions", + "fullLicensesUrl": f"{base_url}lab/api/licenses", + "fullListingsUrl": f"{base_url}lab/api/listings", + "fullMathjaxUrl": f"{base_url}static/notebook/components/MathJax/MathJax.js", + "fullSettingsUrl": f"{base_url}lab/api/settings", + "fullStaticUrl": full_static_url, + "fullThemesUrl": f"{base_url}lab/api/themes", + "fullTranslationsApiUrl": f"{base_url}lab/api/translations", + "fullTreeUrl": f"{base_url}lab/tree", + "fullWorkspacesApiUrl": f"{base_url}lab/api/workspaces", + "ignorePlugins": [], + "labextensionsUrl": "/lab/extensions", + "licensesUrl": "/lab/api/licenses", + 
"listingsUrl": "/lab/api/listings", + "mathjaxConfig": "TeX-AMS-MML_HTMLorMML-full,Safe", + "mode": "multiple-document", + "notebookVersion": "[1, 9, 0]", + "quitButton": True, + "settingsUrl": "/lab/api/settings", + "store_id": 0, + "schemasDir": (self.prefix_dir / "share" / "jupyter" / "lab" / "schemas").as_posix(), + "terminalsAvailable": True, + "themesDir": (self.prefix_dir / "share" / "jupyter" / "lab" / "themes").as_posix(), + "themesUrl": "/lab/api/themes", + "token": "4e2804532de366abc81e32ab0c6bf68a73716fafbdbb2098", + "translationsApiUrl": "/lab/api/translations", + "treePath": "", + "workspace": workspace, + "treeUrl": "/lab/tree", + "workspacesApiUrl": "/lab/api/workspaces", + "wsUrl": "", + } + index = ( + INDEX_HTML.replace("PAGE_CONFIG", json.dumps(page_config)) + .replace("FULL_STATIC_URL", full_static_url) + .replace("MAIN_ID", main_id) + ) + if vendor_id: + index = index.replace( + "VENDORS_NODE_MODULES", + '', + ) + else: + index = index.replace("VENDORS_NODE_MODULES", "") + return index diff --git a/plugins/jupyterlab/pyproject.toml b/plugins/jupyterlab/pyproject.toml index 68baa7f0..6cf23040 100644 --- a/plugins/jupyterlab/pyproject.toml +++ b/plugins/jupyterlab/pyproject.toml @@ -5,9 +5,12 @@ build-backend = "hatchling.build" [project] name = "fps_jupyterlab" description = "An FPS plugin for the JupyterLab API" -keywords = [ "jupyter", "server", "fastapi", "pluggy", "plugins",] +keywords = [ "jupyter", "server", "fastapi", "plugins" ] requires-python = ">=3.8" -dependencies = [ "fps >=0.0.8", "fps-auth-base", "fps-frontend", "fps-lab", "jupyterlab >=4.0.0a35",] +dependencies = [ + "jupyterlab >=4.0.0a37,<5", + #"jupyter_collaboration >=1.0.0a4,<2", +] dynamic = [ "version",] [[project.authors]] name = "Jupyter Development Team" @@ -29,11 +32,8 @@ ignore = [ ".*",] [tool.jupyter-releaser] skip = [ "check-links",] -[project.entry-points.fps_router] -fps-jupyterlab = "fps_jupyterlab.routes" - -[project.entry-points.fps_config] -fps-jupyterlab = "fps_jupyterlab.config" +[project.entry-points."asphalt.components"] +jupyterlab = "fps_jupyterlab.main:JupyterLabComponent" [tool.hatch.version] path = "fps_jupyterlab/__init__.py" diff --git a/plugins/kernels/fps_kernels/config.py b/plugins/kernels/fps_kernels/config.py deleted file mode 100644 index 0f0b0553..00000000 --- a/plugins/kernels/fps_kernels/config.py +++ /dev/null @@ -1,16 +0,0 @@ -from typing import Optional - -from fps.config import PluginModel, get_config # type: ignore -from fps.hooks import register_config # type: ignore - - -class KernelConfig(PluginModel): - default_kernel: str = "python3" - connection_path: Optional[str] = None - - -def get_kernel_config(): - return get_config(KernelConfig) - - -c = register_config(KernelConfig) diff --git a/plugins/kernels/fps_kernels/kernel_driver/driver.py b/plugins/kernels/fps_kernels/kernel_driver/driver.py index 85a03ba1..4e41061c 100644 --- a/plugins/kernels/fps_kernels/kernel_driver/driver.py +++ b/plugins/kernels/fps_kernels/kernel_driver/driver.py @@ -89,14 +89,14 @@ async def stop(self) -> None: async def listen_iopub(self): while True: - msg = await receive_message(self.iopub_channel, change_str_to_date=True) # type: ignore + msg = await receive_message(self.iopub_channel, change_str_to_date=True) msg_id = msg["parent_header"].get("msg_id") if msg_id in self.execute_requests.keys(): self.execute_requests[msg_id]["iopub_msg"].set_result(msg) async def listen_shell(self): while True: - msg = await receive_message(self.shell_channel, change_str_to_date=True) # type: 
ignore + msg = await receive_message(self.shell_channel, change_str_to_date=True) msg_id = msg["parent_header"].get("msg_id") if msg_id in self.execute_requests.keys(): self.execute_requests[msg_id]["shell_msg"].set_result(msg) diff --git a/plugins/kernels/fps_kernels/kernel_driver/message.py b/plugins/kernels/fps_kernels/kernel_driver/message.py index 69f05595..57120966 100644 --- a/plugins/kernels/fps_kernels/kernel_driver/message.py +++ b/plugins/kernels/fps_kernels/kernel_driver/message.py @@ -4,7 +4,7 @@ from typing import Any, Dict, List, Optional, Tuple, cast from uuid import uuid4 -from dateutil.parser import parse as dateutil_parse # type: ignore +from dateutil.parser import parse as dateutil_parse from zmq.asyncio import Socket from zmq.utils import jsonapi diff --git a/plugins/kernels/fps_kernels/kernel_server/server.py b/plugins/kernels/fps_kernels/kernel_server/server.py index ae926dd0..1b11baa5 100644 --- a/plugins/kernels/fps_kernels/kernel_server/server.py +++ b/plugins/kernels/fps_kernels/kernel_server/server.py @@ -6,17 +6,17 @@ from datetime import datetime from typing import Dict, Iterable, List, Optional, cast -from fastapi import WebSocket, WebSocketDisconnect # type: ignore +from fastapi import WebSocket, WebSocketDisconnect from starlette.websockets import WebSocketState from ..kernel_driver.connect import cfg_t, connect_channel from ..kernel_driver.connect import launch_kernel as _launch_kernel from ..kernel_driver.connect import read_connection_file from ..kernel_driver.connect import ( - write_connection_file as _write_connection_file, # type: ignore + write_connection_file as _write_connection_file, ) from ..kernel_driver.message import create_message, receive_message, send_message -from .message import ( # type: ignore +from .message import ( deserialize_msg_from_ws_v1, from_binary, get_msg_from_parts, @@ -150,6 +150,9 @@ async def stop(self) -> None: task.cancel() self.channel_tasks = [] + def interrupt(self) -> None: + self.kernel_process.send_signal(signal.SIGINT) + async def restart(self) -> None: await self.stop() self.setup_connection_file() diff --git a/plugins/kernels/fps_kernels/main.py b/plugins/kernels/fps_kernels/main.py new file mode 100644 index 00000000..28aef7d7 --- /dev/null +++ b/plugins/kernels/fps_kernels/main.py @@ -0,0 +1,50 @@ +from __future__ import annotations +import asyncio +import logging +from collections.abc import AsyncGenerator +from pathlib import Path +from typing import Optional + +from asphalt.core import Component, Context, context_teardown + +from jupyverse_api.auth import Auth +from jupyverse_api.frontend import FrontendConfig +from jupyverse_api.kernels import Kernels, KernelsConfig +from jupyverse_api.yjs import Yjs +from jupyverse_api.app import App + +from .routes import _Kernels + + +logger = logging.getLogger("kernels") + + +class KernelsComponent(Component): + def __init__(self, **kwargs): + self.kernels_config = KernelsConfig(**kwargs) + + @context_teardown + async def start( + self, + ctx: Context, + ) -> AsyncGenerator[None, Optional[BaseException]]: + ctx.add_resource(self.kernels_config, types=KernelsConfig) + + app = await ctx.request_resource(App) + auth = await ctx.request_resource(Auth) # type: ignore + frontend_config = await ctx.request_resource(FrontendConfig) + yjs = await ctx.request_resource(Yjs) + + kernels = _Kernels(app, self.kernels_config, auth, frontend_config, yjs) + ctx.add_resource(kernels, types=Kernels) + + if self.kernels_config.connection_path is not None: + path = 
Path(self.kernels_config.connection_path) + task = asyncio.create_task(kernels.watch_connection_files(path)) + + yield + + if self.kernels_config.connection_path is not None: + task.cancel() + for kernel in kernels.kernels.values(): + await kernel["server"].stop() diff --git a/plugins/kernels/fps_kernels/routes.py b/plugins/kernels/fps_kernels/routes.py index 424d4607..ee9fee09 100644 --- a/plugins/kernels/fps_kernels/routes.py +++ b/plugins/kernels/fps_kernels/routes.py @@ -1,4 +1,3 @@ -import asyncio import json import sys import uuid @@ -8,345 +7,358 @@ from fastapi import APIRouter, Depends, HTTPException, Response from fastapi.responses import FileResponse -from fps.hooks import register_router # type: ignore -from fps_auth_base import User, current_user, websocket_auth # type: ignore -from fps_frontend.config import get_frontend_config # type: ignore -from fps_yjs.routes import YDocWebSocketHandler # type: ignore -from starlette.requests import Request # type: ignore +from starlette.requests import Request from watchfiles import Change, awatch - -from .config import get_kernel_config -from .kernel_driver.driver import KernelDriver # type: ignore +from jupyverse_api.auth import Auth, User +from jupyverse_api.kernels import Kernels, KernelsConfig +from jupyverse_api.kernels.models import CreateSession, Execution, Kernel, Notebook, Session +from jupyverse_api.frontend import FrontendConfig +from jupyverse_api.yjs import Yjs +from jupyverse_api.app import App + +from .kernel_driver.driver import KernelDriver from .kernel_driver.kernelspec import find_kernelspec, kernelspec_dirs -from .kernel_server.server import ( # type: ignore +from .kernel_server.server import ( AcceptedWebSocket, KernelServer, kernels, ) -from .models import CreateSession, Execution, Session - -router = APIRouter() - -kernelspecs: dict = {} -kernel_id_to_connection_file: Dict[str, str] = {} -sessions: dict = {} -prefix_dir: Path = Path(sys.prefix) - - -async def process_connection_files(changes: Set[Tuple[Change, str]]): - # get rid of "simultaneously" added/deleted files - file_changes: Dict[str, List[Change]] = {} - for c in changes: - change, path = c - if path not in file_changes: - file_changes[path] = [] - file_changes[path].append(change) - to_delete: List[str] = [] - for p, cs in file_changes.items(): - if Change.added in cs and Change.deleted in cs: - cs.remove(Change.added) - cs.remove(Change.deleted) - if not cs: - to_delete.append(p) - for p in to_delete: - del file_changes[p] - # process file changes - for path, cs in file_changes.items(): - for change in cs: - if change == Change.deleted: - if path in kernels: - kernel_id = list(kernel_id_to_connection_file.keys())[ - list(kernel_id_to_connection_file.values()).index(path) - ] - del kernels[kernel_id] - elif change == Change.added: - try: - data = json.loads(Path(path).read_text()) - except BaseException: - continue - if "kernel_name" not in data or "key" not in data: - continue - # looks like a kernel connection file - kernel_id = str(uuid.uuid4()) - kernel_id_to_connection_file[kernel_id] = path - kernels[kernel_id] = {"name": data["kernel_name"], "server": None, "driver": None} - - -async def watch_connection_files(path: Path): - # first time scan, treat everything as added files - initial_changes = {(Change.added, str(p)) for p in path.iterdir()} - await process_connection_files(initial_changes) - # then, on every change - async for changes in awatch(path): - await process_connection_files(changes) - - -@router.on_event("startup") -async def 
startup(): - kernel_config = get_kernel_config() - if kernel_config.connection_path is not None: - path = Path(kernel_config.connection_path) - asyncio.create_task(watch_connection_files(path)) - - -@router.on_event("shutdown") -async def stop_kernels(): - for kernel in kernels.values(): - await kernel["server"].stop() - - -@router.get("/api/kernelspecs") -async def get_kernelspecs( - frontend_config=Depends(get_frontend_config), - kernel_config=Depends(get_kernel_config), - user: User = Depends(current_user(permissions={"kernelspecs": ["read"]})), -): - for search_path in kernelspec_dirs(): - for path in Path(search_path).glob("*/kernel.json"): - with open(path) as f: - spec = json.load(f) - name = path.parent.name - resources = { - f.stem: f"{frontend_config.base_url}kernelspecs/{name}/{f.name}" - for f in path.parent.iterdir() - if f.is_file() and f.name != "kernel.json" - } - kernelspecs[name] = {"name": name, "spec": spec, "resources": resources} - return {"default": kernel_config.default_kernel, "kernelspecs": kernelspecs} - - -@router.get("/kernelspecs/{kernel_name}/{file_name}") -async def get_kernelspec( - kernel_name, - file_name, - user: User = Depends(current_user()), -): - for search_path in kernelspec_dirs(): - file_path = Path(search_path) / kernel_name / file_name - if file_path.exists(): - return FileResponse(file_path) - raise HTTPException(status_code=404, detail=f"Kernelspec {kernel_name}/{file_name} not found") - - -@router.get("/api/kernels") -async def get_kernels( - user: User = Depends(current_user(permissions={"kernels": ["read"]})), -): - results = [] - for kernel_id, kernel in kernels.items(): - if kernel["server"]: - connections = kernel["server"].connections - last_activity = kernel["server"].last_activity["date"] - execution_state = kernel["server"].last_activity["execution_state"] - else: - connections = 0 - last_activity = "" - execution_state = "idle" - results.append( - { - "id": kernel_id, - "name": kernel["name"], - "connections": connections, - "last_activity": last_activity, - "execution_state": execution_state, - } - ) - return results - - -@router.delete("/api/sessions/{session_id}", status_code=204) -async def delete_session( - session_id: str, - user: User = Depends(current_user(permissions={"sessions": ["write"]})), -): - kernel_id = sessions[session_id]["kernel"]["id"] - kernel_server = kernels[kernel_id]["server"] - await kernel_server.stop() - del kernels[kernel_id] - if kernel_id in kernel_id_to_connection_file: - del kernel_id_to_connection_file[kernel_id] - del sessions[session_id] - return Response(status_code=HTTPStatus.NO_CONTENT.value) -@router.patch("/api/sessions/{session_id}") -async def rename_session( - request: Request, - user: User = Depends(current_user(permissions={"sessions": ["write"]})), -): - rename_session = await request.json() - session_id = rename_session.pop("id") - for key, value in rename_session.items(): - sessions[session_id][key] = value - return Session(**sessions[session_id]) - - -@router.get("/api/sessions") -async def get_sessions( - user: User = Depends(current_user(permissions={"sessions": ["read"]})), -): - for session in sessions.values(): - kernel_id = session["kernel"]["id"] - kernel_server = kernels[kernel_id]["server"] - session["kernel"]["last_activity"] = kernel_server.last_activity["date"] - session["kernel"]["execution_state"] = kernel_server.last_activity["execution_state"] - return list(sessions.values()) - +class _Kernels(Kernels): + def __init__( + self, + app: App, + kernels_config: 
KernelsConfig, + auth: Auth, + frontend_config: FrontendConfig, + yjs: Yjs, + ) -> None: + super().__init__(app) + + router = APIRouter() + + kernelspecs: dict = {} + self.kernel_id_to_connection_file: Dict[str, str] = {} + sessions: Dict[str, Session] = {} + Path(sys.prefix) + + @router.get("/api/kernelspecs") + async def get_kernelspecs( + user: User = Depends(auth.current_user(permissions={"kernelspecs": ["read"]})), + ): + for search_path in kernelspec_dirs(): + for path in Path(search_path).glob("*/kernel.json"): + with open(path) as f: + spec = json.load(f) + name = path.parent.name + resources = { + f.stem: f"{frontend_config.base_url}kernelspecs/{name}/{f.name}" + for f in path.parent.iterdir() + if f.is_file() and f.name != "kernel.json" + } + kernelspecs[name] = {"name": name, "spec": spec, "resources": resources} + return {"default": kernels_config.default_kernel, "kernelspecs": kernelspecs} + + @router.get("/kernelspecs/{kernel_name}/{file_name}") + async def get_kernelspec( + kernel_name, + file_name, + user: User = Depends(auth.current_user()), + ): + for search_path in kernelspec_dirs(): + file_path = Path(search_path) / kernel_name / file_name + if file_path.exists(): + return FileResponse(file_path) + raise HTTPException( + status_code=404, detail=f"Kernelspec {kernel_name}/{file_name} not found" + ) -@router.post( - "/api/sessions", - status_code=201, - response_model=Session, -) -async def create_session( - request: Request, - user: User = Depends(current_user(permissions={"sessions": ["write"]})), -): - create_session = CreateSession(**(await request.json())) - kernel_id = create_session.kernel.id - kernel_name = create_session.kernel.name - if kernel_name is not None: - # launch a new ("internal") kernel - kernel_server = KernelServer( - kernelspec_path=Path(find_kernelspec(kernel_name)).as_posix(), - kernel_cwd=str(Path(create_session.path).parent), - ) - kernel_id = str(uuid.uuid4()) - kernels[kernel_id] = {"name": kernel_name, "server": kernel_server, "driver": None} - await kernel_server.start() - elif kernel_id is not None: - # external kernel - kernel_name = kernels[kernel_id]["name"] - kernel_server = KernelServer( - connection_file=kernel_id_to_connection_file[kernel_id], - write_connection_file=False, + @router.get("/api/kernels") + async def get_kernels( + user: User = Depends(auth.current_user(permissions={"kernels": ["read"]})), + ): + results = [] + for kernel_id, kernel in kernels.items(): + if kernel["server"]: + connections = kernel["server"].connections + last_activity = kernel["server"].last_activity["date"] + execution_state = kernel["server"].last_activity["execution_state"] + else: + connections = 0 + last_activity = "" + execution_state = "idle" + results.append( + { + "id": kernel_id, + "name": kernel["name"], + "connections": connections, + "last_activity": last_activity, + "execution_state": execution_state, + } + ) + return results + + @router.delete("/api/sessions/{session_id}", status_code=204) + async def delete_session( + session_id: str, + user: User = Depends(auth.current_user(permissions={"sessions": ["write"]})), + ): + kernel_id = sessions[session_id].kernel.id + kernel_server = kernels[kernel_id]["server"] + await kernel_server.stop() + del kernels[kernel_id] + if kernel_id in self.kernel_id_to_connection_file: + del self.kernel_id_to_connection_file[kernel_id] + del sessions[session_id] + return Response(status_code=HTTPStatus.NO_CONTENT.value) + + @router.patch("/api/sessions/{session_id}") + async def rename_session( + request: 
Request, + user: User = Depends(auth.current_user(permissions={"sessions": ["write"]})), + ): + rename_session = await request.json() + session_id = rename_session.pop("id") + for key, value in rename_session.items(): + setattr(sessions[session_id], key, value) + return sessions[session_id] + + @router.get("/api/sessions") + async def get_sessions( + user: User = Depends(auth.current_user(permissions={"sessions": ["read"]})), + ): + for session in sessions.values(): + kernel_id = session.kernel.id + kernel_server = kernels[kernel_id]["server"] + session.kernel.last_activity = kernel_server.last_activity["date"] + session.kernel.execution_state = kernel_server.last_activity["execution_state"] + return list(sessions.values()) + + @router.post( + "/api/sessions", + status_code=201, + response_model=Session, ) - kernels[kernel_id]["server"] = kernel_server - await kernel_server.start(launch_kernel=False) - else: - return - session_id = str(uuid.uuid4()) - session = { - "id": session_id, - "path": create_session.path, - "name": create_session.name, - "type": create_session.type, - "kernel": { - "id": kernel_id, - "name": kernel_name, - "connections": kernel_server.connections, - "last_activity": kernel_server.last_activity["date"], - "execution_state": kernel_server.last_activity["execution_state"], - }, - "notebook": {"path": create_session.path, "name": create_session.name}, - } - sessions[session_id] = session - return Session(**session) - - -@router.post("/api/kernels/{kernel_id}/restart") -async def restart_kernel( - kernel_id, - user: User = Depends(current_user(permissions={"kernels": ["write"]})), -): - if kernel_id in kernels: - kernel = kernels[kernel_id] - await kernel["server"].restart() - result = { - "id": kernel_id, - "name": kernel["name"], - "connections": kernel["server"].connections, - "last_activity": kernel["server"].last_activity["date"], - "execution_state": kernel["server"].last_activity["execution_state"], - } - return result - - -@router.post("/api/kernels/{kernel_id}/execute") -async def execute_cell( - request: Request, - kernel_id, - user: User = Depends(current_user(permissions={"kernels": ["write"]})), -): - r = await request.json() - execution = Execution(**r) - if kernel_id in kernels: - ynotebook = YDocWebSocketHandler.websocket_server.get_room(execution.document_id).document - cell = ynotebook.get_cell(execution.cell_idx) - cell["outputs"] = [] - - kernel = kernels[kernel_id] - if not kernel["driver"]: - kernel["driver"] = driver = KernelDriver( - kernelspec_path=Path(find_kernelspec(kernel["name"])).as_posix(), - write_connection_file=False, - connection_file=kernel["server"].connection_file_path, + async def create_session( + request: Request, + user: User = Depends(auth.current_user(permissions={"sessions": ["write"]})), + ): + create_session = CreateSession(**(await request.json())) + kernel_id = create_session.kernel.id + kernel_name = create_session.kernel.name + if kernel_name is not None: + # launch a new ("internal") kernel + kernel_server = KernelServer( + kernelspec_path=Path(find_kernelspec(kernel_name)).as_posix(), + kernel_cwd=str(Path(create_session.path).parent), + ) + kernel_id = str(uuid.uuid4()) + kernels[kernel_id] = {"name": kernel_name, "server": kernel_server, "driver": None} + await kernel_server.start() + elif kernel_id is not None: + # external kernel + kernel_name = kernels[kernel_id]["name"] + kernel_server = KernelServer( + connection_file=self.kernel_id_to_connection_file[kernel_id], + write_connection_file=False, + ) + 
kernels[kernel_id]["server"] = kernel_server + await kernel_server.start(launch_kernel=False) + else: + return + session_id = str(uuid.uuid4()) + session = Session( + id=session_id, + path=create_session.path, + name=create_session.name, + type=create_session.type, + kernel=Kernel( + id=kernel_id, + name=kernel_name, + connections=kernel_server.connections, + last_activity=kernel_server.last_activity["date"], + execution_state=kernel_server.last_activity["execution_state"], + ), + notebook=Notebook( + path=create_session.path, + name=create_session.name, + ), ) - await driver.connect() - driver = kernel["driver"] - - await driver.execute(cell) - ynotebook.set_cell(execution.cell_idx, cell) - - -@router.get("/api/kernels/{kernel_id}") -async def get_kernel( - kernel_id, - user: User = Depends(current_user(permissions={"kernels": ["read"]})), -): - if kernel_id in kernels: - kernel = kernels[kernel_id] - result = { - "id": kernel_id, - "name": kernel["name"], - "connections": kernel["server"].connections, - "last_activity": kernel["server"].last_activity["date"], - "execution_state": kernel["server"].last_activity["execution_state"], - } - return result - - -@router.delete("/api/kernels/{kernel_id}", status_code=204) -async def shutdown_kernel( - kernel_id, - user: User = Depends(current_user(permissions={"kernels": ["write"]})), -): - if kernel_id in kernels: - await kernels[kernel_id]["server"].stop() - del kernels[kernel_id] - for session_id in [k for k, v in sessions.items() if v["kernel"]["id"] == kernel_id]: - del sessions[session_id] - return Response(status_code=HTTPStatus.NO_CONTENT.value) - - -@router.websocket("/api/kernels/{kernel_id}/channels") -async def kernel_channels( - kernel_id, - session_id, - websocket_permissions=Depends(websocket_auth(permissions={"kernels": ["execute"]})), -): - if websocket_permissions is None: - return - websocket, permissions = websocket_permissions - subprotocol = ( - "v1.kernel.websocket.jupyter.org" - if "v1.kernel.websocket.jupyter.org" in websocket["subprotocols"] - else None - ) - await websocket.accept(subprotocol=subprotocol) - accepted_websocket = AcceptedWebSocket(websocket, subprotocol) - if kernel_id in kernels: - kernel_server = kernels[kernel_id]["server"] - if kernel_server is None: - # this is an external kernel - # kernel is already launched, just start a kernel server - kernel_server = KernelServer( - connection_file=kernel_id, - write_connection_file=False, + sessions[session_id] = session + return session + + @router.post("/api/kernels/{kernel_id}/interrupt") + async def interrupt_kernel( + kernel_id, + user: User = Depends(auth.current_user(permissions={"kernels": ["write"]})), + ): + if kernel_id in kernels: + kernel = kernels[kernel_id] + kernel["server"].interrupt() + result = { + "id": kernel_id, + "name": kernel["name"], + "connections": kernel["server"].connections, + "last_activity": kernel["server"].last_activity["date"], + "execution_state": kernel["server"].last_activity["execution_state"], + } + return result + + @router.post("/api/kernels/{kernel_id}/restart") + async def restart_kernel( + kernel_id, + user: User = Depends(auth.current_user(permissions={"kernels": ["write"]})), + ): + if kernel_id in kernels: + kernel = kernels[kernel_id] + await kernel["server"].restart() + result = { + "id": kernel_id, + "name": kernel["name"], + "connections": kernel["server"].connections, + "last_activity": kernel["server"].last_activity["date"], + "execution_state": kernel["server"].last_activity["execution_state"], + } + 
return result + + @router.post("/api/kernels/{kernel_id}/execute") + async def execute_cell( + request: Request, + kernel_id, + user: User = Depends(auth.current_user(permissions={"kernels": ["write"]})), + ): + r = await request.json() + execution = Execution(**r) + if kernel_id in kernels: + ynotebook = yjs.YDocWebSocketHandler.websocket_server.get_room( + execution.document_id + ).document + cell = ynotebook.get_cell(execution.cell_idx) + cell["outputs"] = [] + + kernel = kernels[kernel_id] + if not kernel["driver"]: + kernel["driver"] = driver = KernelDriver( + kernelspec_path=Path(find_kernelspec(kernel["name"])).as_posix(), + write_connection_file=False, + connection_file=kernel["server"].connection_file_path, + ) + await driver.connect() + driver = kernel["driver"] + + await driver.execute(cell) + ynotebook.set_cell(execution.cell_idx, cell) + + @router.get("/api/kernels/{kernel_id}") + async def get_kernel( + kernel_id, + user: User = Depends(auth.current_user(permissions={"kernels": ["read"]})), + ): + if kernel_id in kernels: + kernel = kernels[kernel_id] + result = { + "id": kernel_id, + "name": kernel["name"], + "connections": kernel["server"].connections, + "last_activity": kernel["server"].last_activity["date"], + "execution_state": kernel["server"].last_activity["execution_state"], + } + return result + + @router.delete("/api/kernels/{kernel_id}", status_code=204) + async def shutdown_kernel( + kernel_id, + user: User = Depends(auth.current_user(permissions={"kernels": ["write"]})), + ): + if kernel_id in kernels: + await kernels[kernel_id]["server"].stop() + del kernels[kernel_id] + for session_id in [k for k, v in sessions.items() if v.kernel.id == kernel_id]: + del sessions[session_id] + return Response(status_code=HTTPStatus.NO_CONTENT.value) + + @router.websocket("/api/kernels/{kernel_id}/channels") + async def kernel_channels( + kernel_id, + session_id, + websocket_permissions=Depends( + auth.websocket_auth(permissions={"kernels": ["execute"]}) + ), + ): + if websocket_permissions is None: + return + websocket, permissions = websocket_permissions + subprotocol = ( + "v1.kernel.websocket.jupyter.org" + if "v1.kernel.websocket.jupyter.org" in websocket["subprotocols"] + else None ) - await kernel_server.start(launch_kernel=False) - kernels[kernel_id]["server"] = kernel_server - await kernel_server.serve(accepted_websocket, session_id, permissions) - - -r = register_router(router) + await websocket.accept(subprotocol=subprotocol) + accepted_websocket = AcceptedWebSocket(websocket, subprotocol) + if kernel_id in kernels: + kernel_server = kernels[kernel_id]["server"] + if kernel_server is None: + # this is an external kernel + # kernel is already launched, just start a kernel server + kernel_server = KernelServer( + connection_file=kernel_id, + write_connection_file=False, + ) + await kernel_server.start(launch_kernel=False) + kernels[kernel_id]["server"] = kernel_server + await kernel_server.serve(accepted_websocket, session_id, permissions) + + self.include_router(router) + + self.kernels = kernels + + async def watch_connection_files(self, path: Path) -> None: + # first time scan, treat everything as added files + initial_changes = {(Change.added, str(p)) for p in path.iterdir()} + await self.process_connection_files(initial_changes) + # then, on every change + async for changes in awatch(path): + await self.process_connection_files(changes) + + async def process_connection_files(self, changes: Set[Tuple[Change, str]]): + # get rid of "simultaneously" added/deleted 
files + file_changes: Dict[str, List[Change]] = {} + for c in changes: + change, path = c + if path not in file_changes: + file_changes[path] = [] + file_changes[path].append(change) + to_delete: List[str] = [] + for p, cs in file_changes.items(): + if Change.added in cs and Change.deleted in cs: + cs.remove(Change.added) + cs.remove(Change.deleted) + if not cs: + to_delete.append(p) + for p in to_delete: + del file_changes[p] + # process file changes + for path, cs in file_changes.items(): + for change in cs: + if change == Change.deleted: + if path in kernels: + kernel_id = list(self.kernel_id_to_connection_file.keys())[ + list(self.kernel_id_to_connection_file.values()).index(path) + ] + del kernels[kernel_id] + elif change == Change.added: + try: + data = json.loads(Path(path).read_text()) + except BaseException: + continue + if "kernel_name" not in data or "key" not in data: + continue + # looks like a kernel connection file + kernel_id = str(uuid.uuid4()) + self.kernel_id_to_connection_file[kernel_id] = path + kernels[kernel_id] = { + "name": data["kernel_name"], + "server": None, + "driver": None, + } diff --git a/plugins/kernels/pyproject.toml b/plugins/kernels/pyproject.toml index 4a782399..969745fd 100644 --- a/plugins/kernels/pyproject.toml +++ b/plugins/kernels/pyproject.toml @@ -5,9 +5,15 @@ build-backend = "hatchling.build" [project] name = "fps_kernels" description = "An FPS plugin for the kernels API" -keywords = [ "jupyter", "server", "fastapi", "pluggy", "plugins",] +keywords = ["jupyter", "server", "fastapi", "plugins"] requires-python = ">=3.8" -dependencies = [ "fps >=0.0.8", "fps-auth-base", "fps-frontend", "fps-yjs", "pyzmq", "websockets", "python-dateutil", "watchfiles >=0.16.1,<1"] +dependencies = [ + "pyzmq", + "websockets", + "python-dateutil", + "types-python-dateutil", + "watchfiles >=0.16.1,<1", +] dynamic = [ "version",] [[project.authors]] name = "Jupyter Development Team" @@ -29,11 +35,8 @@ ignore = [ ".*",] [tool.jupyter-releaser] skip = [ "check-links",] -[project.entry-points.fps_router] -fps-kernels = "fps_kernels.routes" - -[project.entry-points.fps_config] -fps-kernels = "fps_kernels.config" +[project.entry-points."asphalt.components"] +kernels = "fps_kernels.main:KernelsComponent" [tool.hatch.version] path = "fps_kernels/__init__.py" diff --git a/plugins/lab/fps_lab/config.py b/plugins/lab/fps_lab/config.py deleted file mode 100644 index 374a0e80..00000000 --- a/plugins/lab/fps_lab/config.py +++ /dev/null @@ -1,13 +0,0 @@ -from fps.config import PluginModel, get_config # type: ignore -from fps.hooks import register_config # type: ignore - - -class LabConfig(PluginModel): - collaborative: bool = False - - -def get_lab_config(): - return get_config(LabConfig) - - -c = register_config(LabConfig) diff --git a/plugins/lab/fps_lab/main.py b/plugins/lab/fps_lab/main.py new file mode 100644 index 00000000..c03a42e6 --- /dev/null +++ b/plugins/lab/fps_lab/main.py @@ -0,0 +1,22 @@ +from asphalt.core import Component, Context +from jupyverse_api.app import App +from jupyverse_api.auth import Auth +from jupyverse_api.frontend import FrontendConfig +from jupyverse_api.jupyterlab import JupyterLabConfig +from jupyverse_api.lab import Lab + +from .routes import _Lab + + +class LabComponent(Component): + async def start( + self, + ctx: Context, + ) -> None: + app = await ctx.request_resource(App) + auth = await ctx.request_resource(Auth) # type: ignore + frontend_config = await ctx.request_resource(FrontendConfig) + jupyterlab_config = 
ctx.get_resource(JupyterLabConfig) + + lab = _Lab(app, auth, frontend_config, jupyterlab_config) + ctx.add_resource(lab, types=Lab) diff --git a/plugins/lab/fps_lab/routes.py b/plugins/lab/fps_lab/routes.py index 193834e9..25ffb740 100644 --- a/plugins/lab/fps_lab/routes.py +++ b/plugins/lab/fps_lab/routes.py @@ -1,217 +1,253 @@ import json +import logging +import os import sys +from glob import glob from http import HTTPStatus from pathlib import Path +from typing import Any, Dict, List, Optional, Tuple import json5 # type: ignore -import pkg_resources # type: ignore -from babel import Locale # type: ignore -from fastapi import Depends, Response, status +import pkg_resources +from babel import Locale +from fastapi import APIRouter, Depends, Response, status from fastapi.responses import FileResponse, RedirectResponse from fastapi.staticfiles import StaticFiles -from fps_auth_base import User, current_user, update_user # type: ignore -from fps_frontend.config import get_frontend_config # type: ignore -from starlette.requests import Request # type: ignore +from starlette.requests import Request -import jupyverse # type: ignore +import jupyverse +from jupyverse_api.app import App +from jupyverse_api.auth import Auth, User +from jupyverse_api.frontend import FrontendConfig +from jupyverse_api.jupyterlab import JupyterLabConfig +from jupyverse_api.lab import Lab -from .utils import get_federated_extensions -try: - import jupyterlab # type: ignore - from fps_jupyterlab.config import get_jlab_config # type: ignore +logger = logging.getLogger("lab") - jlab_dev_mode = get_jlab_config().dev_mode -except Exception: - jlab_dev_mode = False +class _Lab(Lab): + def __init__( + self, + app: App, + auth: Auth, + frontend_config: FrontendConfig, + jupyterlab_config: Optional[JupyterLabConfig], + ) -> None: + super().__init__(app) -LOCALE = "en" + self.auth = auth + self.frontend_config = frontend_config + if jupyterlab_config is not None: + import jupyterlab as jupyterlab_module # type: ignore -prefix_dir = Path(sys.prefix) -if jlab_dev_mode: - jlab_dir = Path(jupyterlab.__file__).parents[1] / "dev_mode" -else: - jlab_dir = prefix_dir / "share" / "jupyter" / "lab" - + jlab_dev_mode = jupyterlab_config.dev_mode + else: + jlab_dev_mode = False -def init_router(router, redirect_after_root): - extensions_dir = prefix_dir / "share" / "jupyter" / "labextensions" - federated_extensions, disabled_extensions = get_federated_extensions(extensions_dir) + self.locale = "en" - for ext in federated_extensions: - name = ext["name"] - router.mount( - f"/lab/extensions/{name}/static", - StaticFiles(directory=extensions_dir / name / "static"), - name=name, - ) - - router.mount( - "/lab/api/themes", - StaticFiles(directory=jlab_dir / "themes"), - name="themes", - ) - - @router.get("/", name="root") - async def get_root( - response: Response, - frontend_config=Depends(get_frontend_config), - user: User = Depends(current_user()), - ): - # auto redirect - response.status_code = status.HTTP_302_FOUND - response.headers["Location"] = frontend_config.base_url + redirect_after_root - - @router.get("/favicon.ico") - async def get_favicon(): - return FileResponse(Path(jupyverse.__file__).parent / "static" / "favicon.ico") - - @router.get("/static/notebook/components/MathJax/{rest_of_path:path}") - async def get_mathjax(rest_of_path): - return RedirectResponse( - "https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.7/" + rest_of_path + self.prefix_dir = Path(sys.prefix) + if jlab_dev_mode: + self.jlab_dir = 
Path(jupyterlab_module.__file__).parents[1] / "dev_mode" + else: + self.jlab_dir = self.prefix_dir / "share" / "jupyter" / "lab" + + def init_router( + self, router: APIRouter, redirect_after_root: str + ) -> Tuple[Path, List[Dict[str, Any]]]: + extensions_dir = self.prefix_dir / "share" / "jupyter" / "labextensions" + federated_extensions, disabled_extensions = self.get_federated_extensions(extensions_dir) + + for ext in federated_extensions: + name = ext["name"] + self.mount( + f"/lab/extensions/{name}/static", + StaticFiles(directory=extensions_dir / name / "static"), + name=name, + ) + + self.mount( + "/lab/api/themes", + StaticFiles(directory=self.jlab_dir / "themes"), + name="themes", ) - @router.get("/lab/api/listings/@jupyterlab/extensionmanager-extension/listings.json") - async def get_listings(user: User = Depends(current_user())): - return { - "blocked_extensions_uris": [], - "allowed_extensions_uris": [], - "blocked_extensions": [], - "allowed_extensions": [], - } - - @router.get("/lab/api/extensions") - async def get_extensions(user: User = Depends(current_user())): - return federated_extensions - - @router.get("/lab/api/translations/") - async def get_translations_( - frontend_config=Depends(get_frontend_config), - user: User = Depends(current_user()), - ): - return RedirectResponse(f"{frontend_config.base_url}lab/api/translations") - - @router.get("/lab/api/translations") - async def get_translations(user: User = Depends(current_user())): - locale = Locale.parse("en") - display_name = (locale.get_display_name(LOCALE) or "").capitalize() - native_name = (locale.get_display_name() or "").capitalize() - data = { - "en": { - "displayName": display_name, - "nativeName": native_name, + @router.get("/", name="root") + async def get_root( + response: Response, + user: User = Depends(self.auth.current_user()), + ): + # auto redirect + response.status_code = status.HTTP_302_FOUND + response.headers["Location"] = self.frontend_config.base_url + redirect_after_root + + @router.get("/favicon.ico") + async def get_favicon(): + return FileResponse(Path(jupyverse.__file__).parent / "static" / "favicon.ico") + + @router.get("/static/notebook/components/MathJax/{rest_of_path:path}") + async def get_mathjax(rest_of_path): + return RedirectResponse( + "https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.7/" + rest_of_path + ) + + @router.get("/lab/api/listings/@jupyterlab/extensionmanager-extension/listings.json") + async def get_listings(user: User = Depends(self.auth.current_user())): + return { + "blocked_extensions_uris": [], + "allowed_extensions_uris": [], + "blocked_extensions": [], + "allowed_extensions": [], } - } - for ep in pkg_resources.iter_entry_points(group="jupyterlab.languagepack"): - locale = Locale.parse(ep.name) - data[ep.name] = { - "displayName": display_name, - "nativeName": native_name, + + @router.get("/lab/api/extensions") + async def get_extensions(user: User = Depends(self.auth.current_user())): + return federated_extensions + + @router.get("/lab/api/translations/") + async def get_translations_( + user: User = Depends(self.auth.current_user()), + ): + return RedirectResponse(f"{self.frontend_config.base_url}lab/api/translations") + + @router.get("/lab/api/translations") + async def get_translations(user: User = Depends(self.auth.current_user())): + locale = Locale.parse("en") + display_name = (locale.get_display_name(self.locale) or "").capitalize() + native_name = (locale.get_display_name() or "").capitalize() + data = { + "en": { + "displayName": display_name, + 
"nativeName": native_name, + } } - return {"data": data, "message": ""} - - @router.get("/lab/api/translations/{language}") - async def get_translation( - language, - user: User = Depends(current_user()), - ): - global LOCALE - if language == "en": - LOCALE = language - return {} - for ep in pkg_resources.iter_entry_points(group="jupyterlab.languagepack"): - if ep.name == language: - break - else: - return {"data": {}, "message": f"Language pack '{language}' not installed!"} - LOCALE = language - package = ep.load() - data = {} - for path in (Path(package.__file__).parent / "locale" / language / "LC_MESSAGES").glob( - "*.json" + for ep in pkg_resources.iter_entry_points(group="jupyterlab.languagepack"): + locale = Locale.parse(ep.name) + data[ep.name] = { + "displayName": display_name, + "nativeName": native_name, + } + return {"data": data, "message": ""} + + @router.get("/lab/api/translations/{language}") + async def get_translation( + language, + user: User = Depends(self.auth.current_user()), ): - with open(path) as f: - data.update({path.stem: json.load(f)}) - return {"data": data, "message": ""} - - @router.get("/lab/api/settings/{name0}/{name1}:{name2}") - async def get_setting( - name0, - name1, - name2, - user: User = Depends(current_user()), - ): - with open(jlab_dir / "static" / "package.json") as f: - package = json.load(f) - if name0 in ["@jupyterlab", "@retrolab"]: - schemas_parent = jlab_dir - else: - schemas_parent = extensions_dir / name0 / name1 - with open(schemas_parent / "schemas" / name0 / name1 / f"{name2}.json") as f: - schema = json.load(f) - key = f"{name1}:{name2}" - setting = { - "id": f"@jupyterlab/{key}", - "schema": schema, - "version": package["version"], - "raw": "{}", - "settings": {}, - "last_modified": None, - "created": None, - } - if user: - user_settings = json.loads(user.settings) - if key in user_settings: - setting.update(user_settings[key]) - setting["settings"] = json5.loads(user_settings[key]["raw"]) - return setting - - @router.put( - "/lab/api/settings/@jupyterlab/{name0}:{name1}", - status_code=204, - ) - async def change_setting( - request: Request, - name0, - name1, - user: User = Depends(current_user()), - user_update=Depends(update_user), - ): - settings = json.loads(user.settings) - settings[f"{name0}:{name1}"] = await request.json() - await user_update({"settings": json.dumps(settings)}) - return Response(status_code=HTTPStatus.NO_CONTENT.value) - - @router.get("/lab/api/settings") - async def get_settings(user: User = Depends(current_user())): - with open(jlab_dir / "static" / "package.json") as f: - package = json.load(f) - if user: - user_settings = json.loads(user.settings) - else: - user_settings = {} - settings = [] - for path in (jlab_dir / "schemas" / "@jupyterlab").glob("*/*.json"): - with open(path) as f: + if language == "en": + self.locale = language + return {} + for ep in pkg_resources.iter_entry_points(group="jupyterlab.languagepack"): + if ep.name == language: + break + else: + return {"data": {}, "message": f"Language pack '{language}' not installed!"} + self.locale = language + package = ep.load() + data = {} + for path in (Path(package.__file__).parent / "locale" / language / "LC_MESSAGES").glob( + "*.json" + ): + with open(path) as f: + data.update({path.stem: json.load(f)}) + return {"data": data, "message": ""} + + @router.get("/lab/api/settings/{name0}/{name1}:{name2}") + async def get_setting( + name0, + name1, + name2, + user: User = Depends(self.auth.current_user()), + ): + with open(self.jlab_dir / "static" / 
"package.json") as f: + package = json.load(f) + if name0 in ["@jupyterlab", "@retrolab"]: + schemas_parent = self.jlab_dir + else: + schemas_parent = extensions_dir / name0 / name1 + with open(schemas_parent / "schemas" / name0 / name1 / f"{name2}.json") as f: schema = json.load(f) - key = f"{path.parent.name}:{path.stem}" + key = f"{name1}:{name2}" setting = { "id": f"@jupyterlab/{key}", "schema": schema, "version": package["version"], "raw": "{}", "settings": {}, - "warning": None, "last_modified": None, "created": None, } - if key in user_settings: - setting.update(user_settings[key]) - setting["settings"] = json5.loads(user_settings[key]["raw"]) - settings.append(setting) - return {"settings": settings} - - return prefix_dir, federated_extensions + if user: + user_settings = json.loads(user.settings) + if key in user_settings: + setting.update(user_settings[key]) + setting["settings"] = json5.loads(user_settings[key]["raw"]) + return setting + + @router.put( + "/lab/api/settings/@jupyterlab/{name0}:{name1}", + status_code=204, + ) + async def change_setting( + request: Request, + name0, + name1, + user: User = Depends(self.auth.current_user()), + user_update=Depends(self.auth.update_user), + ): + settings = json.loads(user.settings) + settings[f"{name0}:{name1}"] = await request.json() + await user_update({"settings": json.dumps(settings)}) + return Response(status_code=HTTPStatus.NO_CONTENT.value) + + @router.get("/lab/api/settings") + async def get_settings(user: User = Depends(self.auth.current_user())): + with open(self.jlab_dir / "static" / "package.json") as f: + package = json.load(f) + if user: + user_settings = json.loads(user.settings) + else: + user_settings = {} + settings = [] + for path in (self.jlab_dir / "schemas" / "@jupyterlab").glob("*/*.json"): + with open(path) as f: + schema = json.load(f) + key = f"{path.parent.name}:{path.stem}" + setting = { + "id": f"@jupyterlab/{key}", + "schema": schema, + "version": package["version"], + "raw": "{}", + "settings": {}, + "warning": None, + "last_modified": None, + "created": None, + } + if key in user_settings: + setting.update(user_settings[key]) + setting["settings"] = json5.loads(user_settings[key]["raw"]) + settings.append(setting) + return {"settings": settings} + + return self.prefix_dir, federated_extensions + + def get_federated_extensions(self, extensions_dir: Path) -> Tuple[List, List]: + federated_extensions = [] + disabled_extensions = [] + + for path in glob(os.path.join(extensions_dir, "**", "package.json"), recursive=True): + with open(path) as f: + package = json.load(f) + if "jupyterlab" not in package: + continue + extension = package["jupyterlab"]["_build"] + extension["name"] = package["name"] + extension["description"] = package["description"] + federated_extensions.append(extension) + + for ext in package["jupyterlab"].get("disabledExtensions", []): + disabled_extensions.append(ext) + + return federated_extensions, disabled_extensions diff --git a/plugins/lab/fps_lab/utils.py b/plugins/lab/fps_lab/utils.py deleted file mode 100644 index cbe8c913..00000000 --- a/plugins/lab/fps_lab/utils.py +++ /dev/null @@ -1,25 +0,0 @@ -import json -import os -from glob import glob -from pathlib import Path -from typing import List, Tuple - - -def get_federated_extensions(extensions_dir: Path) -> Tuple[List, List]: - federated_extensions = [] - disabled_extensions = [] - - for path in glob(os.path.join(extensions_dir, "**", "package.json"), recursive=True): - with open(path) as f: - package = json.load(f) - if 
"jupyterlab" not in package: - continue - extension = package["jupyterlab"]["_build"] - extension["name"] = package["name"] - extension["description"] = package["description"] - federated_extensions.append(extension) - - for ext in package["jupyterlab"].get("disabledExtensions", []): - disabled_extensions.append(ext) - - return federated_extensions, disabled_extensions diff --git a/plugins/lab/pyproject.toml b/plugins/lab/pyproject.toml index 5d003a4b..75ff1b16 100644 --- a/plugins/lab/pyproject.toml +++ b/plugins/lab/pyproject.toml @@ -5,9 +5,12 @@ build-backend = "hatchling.build" [project] name = "fps_lab" description = "An FPS plugin for the JupyterLab/RetroLab API" -keywords = [ "jupyter", "server", "fastapi", "pluggy", "plugins",] +keywords = ["jupyter", "server", "fastapi", "plugins"] requires-python = ">=3.8" -dependencies = [ "fps >=0.0.8", "fps-auth-base", "fps-frontend", "babel", "json5",] +dependencies = [ + "babel", + "json5", +] dynamic = [ "version",] [[project.authors]] name = "Jupyter Development Team" @@ -29,11 +32,8 @@ ignore = [ ".*",] [tool.jupyter-releaser] skip = [ "check-links",] -[project.entry-points.fps_router] -fps-lab = "fps_lab.routes" - -[project.entry-points.fps_config] -fps-lab = "fps_lab.config" +[project.entry-points."asphalt.components"] +lab = "fps_lab.main:LabComponent" [tool.hatch.version] path = "fps_lab/__init__.py" diff --git a/plugins/login/fps_login/main.py b/plugins/login/fps_login/main.py new file mode 100644 index 00000000..23459edd --- /dev/null +++ b/plugins/login/fps_login/main.py @@ -0,0 +1,18 @@ +from asphalt.core import Component, Context +from jupyverse_api.auth import AuthConfig +from jupyverse_api.login import Login +from jupyverse_api.app import App + +from .routes import _Login + + +class LoginComponent(Component): + async def start( + self, + ctx: Context, + ) -> None: + app = await ctx.request_resource(App) + auth_config = await ctx.request_resource(AuthConfig) + + login = _Login(app, auth_config) + ctx.add_resource(login, types=Login) diff --git a/plugins/login/fps_login/routes.py b/plugins/login/fps_login/routes.py index dd7591ce..ba117254 100644 --- a/plugins/login/fps_login/routes.py +++ b/plugins/login/fps_login/routes.py @@ -1,30 +1,41 @@ from pathlib import Path +from typing import Optional, cast from fastapi import APIRouter from fastapi.responses import FileResponse from fastapi.staticfiles import StaticFiles -from fps.hooks import register_router # type: ignore -from fps.logging import get_configured_logger # type: ignore -from fps_auth.config import get_auth_config # type: ignore +from jupyverse_api.app import App +from jupyverse_api.auth import AuthConfig +from jupyverse_api.login import Login -logger = get_configured_logger("login") -router = APIRouter() -prefix_static = Path(__file__).parent / "static" +class _AuthConfig(AuthConfig): + login_url: Optional[str] -auth_config = get_auth_config() -auth_config.login_url = "/login" -router.mount( - "/static/login", - StaticFiles(directory=prefix_static), - name="static", -) +class _Login(Login): + def __init__( + self, + app: App, + auth_config: AuthConfig, + ) -> None: + super().__init__(app) + router = APIRouter() + prefix_static = Path(__file__).parent / "static" -@router.get("/login") -async def api_login(): - return FileResponse(prefix_static / "index.html") + # fps_login needs an AuthConfig that has a login_url, such as fps_auth's config + auth_config = cast(_AuthConfig, auth_config) + auth_config.login_url = "/login" + self.mount( + "/static/login", + 
StaticFiles(directory=prefix_static), + name="static", + ) -r = register_router(router) + @router.get("/login") + async def api_login(): + return FileResponse(prefix_static / "index.html") + + self.include_router(router) diff --git a/plugins/login/pyproject.toml b/plugins/login/pyproject.toml index d64ece00..d2d41020 100644 --- a/plugins/login/pyproject.toml +++ b/plugins/login/pyproject.toml @@ -5,9 +5,8 @@ build-backend = "hatchling.build" [project] name = "fps_login" description = "An FPS plugin for the login API" -keywords = [ "jupyter", "server", "fastapi", "pluggy", "plugins",] +keywords = ["jupyter", "server", "fastapi", "plugins"] requires-python = ">=3.8" -dependencies = [ "fps >=0.0.8",] dynamic = [ "version",] [[project.authors]] name = "Jupyter Development Team" @@ -29,8 +28,8 @@ ignore = [ ".*",] [tool.jupyter-releaser] skip = [ "check-links",] -[project.entry-points.fps_router] -fps-login = "fps_login.routes" +[project.entry-points."asphalt.components"] +login = "fps_login.main:LoginComponent" [tool.hatch.version] path = "fps_login/__init__.py" diff --git a/plugins/nbconvert/fps_nbconvert/main.py b/plugins/nbconvert/fps_nbconvert/main.py new file mode 100644 index 00000000..b5c8d98f --- /dev/null +++ b/plugins/nbconvert/fps_nbconvert/main.py @@ -0,0 +1,18 @@ +from asphalt.core import Component, Context +from jupyverse_api.auth import Auth +from jupyverse_api.app import App +from jupyverse_api.nbconvert import Nbconvert + +from .routes import _Nbconvert + + +class NbconvertComponent(Component): + async def start( + self, + ctx: Context, + ) -> None: + app = await ctx.request_resource(App) + auth = await ctx.request_resource(Auth) # type: ignore + + nbconvert = _Nbconvert(app, auth) + ctx.add_resource(nbconvert, types=Nbconvert) diff --git a/plugins/nbconvert/fps_nbconvert/routes.py b/plugins/nbconvert/fps_nbconvert/routes.py index 2d70b211..0c499ad3 100644 --- a/plugins/nbconvert/fps_nbconvert/routes.py +++ b/plugins/nbconvert/fps_nbconvert/routes.py @@ -1,40 +1,47 @@ import tempfile from pathlib import Path -import nbconvert # type: ignore +import nbconvert from fastapi import APIRouter, Depends from fastapi.responses import FileResponse -from fps.hooks import register_router # type: ignore -from fps_auth_base import User, current_user # type: ignore - -router = APIRouter() - - -@router.get("/api/nbconvert") -async def get_nbconvert_formats(): - return { - name: {"output_mimetype": nbconvert.exporters.get_exporter(name).output_mimetype} - for name in nbconvert.exporters.get_export_names() - } - - -@router.get("/nbconvert/{format}/{path}") -async def get_nbconvert_document( - format: str, - path: str, - download: bool, - user: User = Depends(current_user(permissions={"nbconvert": ["read"]})), -): - exporter = nbconvert.exporters.get_exporter(format) - if download: - media_type = "application/octet-stream" - else: - media_type = exporter.output_mimetype - tmp_dir = Path(tempfile.mkdtemp()) - tmp_path = tmp_dir / (Path(path).stem + exporter().file_extension) - with open(tmp_path, "wt") as f: - f.write(exporter().from_filename(path)[0]) - return FileResponse(tmp_path, media_type=media_type, filename=tmp_path.name) - - -r = register_router(router) +from jupyverse_api.app import App +from jupyverse_api.auth import Auth, User +from jupyverse_api.nbconvert import Nbconvert + + +class _Nbconvert(Nbconvert): + def __init__( + self, + app: App, + auth: Auth, + ) -> None: + super().__init__(app) + + router = APIRouter() + + @router.get("/api/nbconvert") + async def 
get_nbconvert_formats(): + return { + name: {"output_mimetype": nbconvert.exporters.get_exporter(name).output_mimetype} + for name in nbconvert.exporters.get_export_names() + } + + @router.get("/nbconvert/{format}/{path}") + async def get_nbconvert_document( + format: str, + path: str, + download: bool, + user: User = Depends(auth.current_user(permissions={"nbconvert": ["read"]})), + ): + exporter = nbconvert.exporters.get_exporter(format) + if download: + media_type = "application/octet-stream" + else: + media_type = exporter.output_mimetype + tmp_dir = Path(tempfile.mkdtemp()) + tmp_path = tmp_dir / (Path(path).stem + exporter().file_extension) + with open(tmp_path, "wt") as f: + f.write(exporter().from_filename(path)[0]) + return FileResponse(tmp_path, media_type=media_type, filename=tmp_path.name) + + self.include_router(router) diff --git a/plugins/nbconvert/pyproject.toml b/plugins/nbconvert/pyproject.toml index cff59a7a..841ea4e2 100644 --- a/plugins/nbconvert/pyproject.toml +++ b/plugins/nbconvert/pyproject.toml @@ -5,9 +5,11 @@ build-backend = "hatchling.build" [project] name = "fps_nbconvert" description = "An FPS plugin for the nbconvert API" -keywords = [ "jupyter", "server", "fastapi", "pluggy", "plugins",] +keywords = ["jupyter", "server", "fastapi", "plugins"] requires-python = ">=3.8" -dependencies = [ "fps >=0.0.8", "fps-auth-base", "nbconvert",] +dependencies = [ + "nbconvert", +] dynamic = [ "version",] [[project.authors]] name = "Jupyter Development Team" @@ -29,8 +31,8 @@ ignore = [ ".*",] [tool.jupyter-releaser] skip = [ "check-links",] -[project.entry-points.fps_router] -fps-nbconvert = "fps_nbconvert.routes" +[project.entry-points."asphalt.components"] +nbconvert = "fps_nbconvert.main:NbconvertComponent" [tool.hatch.version] path = "fps_nbconvert/__init__.py" diff --git a/plugins/noauth/fps_noauth/backends.py b/plugins/noauth/fps_noauth/backends.py index 5bbb97bf..d042eb57 100644 --- a/plugins/noauth/fps_noauth/backends.py +++ b/plugins/noauth/fps_noauth/backends.py @@ -1,34 +1,32 @@ from typing import Any, Dict, List, Optional, Tuple from fastapi import WebSocket - -from .models import User +from jupyverse_api.auth import Auth, User USER = User() -def current_user(*args, **kwargs): - async def _(): - return USER - - return _ - +class _NoAuth(Auth): + def current_user(self, *args, **kwargs): + async def _(): + return USER -def websocket_auth(permissions: Optional[Dict[str, List[str]]] = None): - async def _( - websocket: WebSocket, - ) -> Optional[Tuple[WebSocket, Optional[Dict[str, List[str]]]]]: - return websocket, permissions + return _ - return _ + def websocket_auth(self, permissions: Optional[Dict[str, List[str]]] = None): + async def _( + websocket: WebSocket, + ) -> Optional[Tuple[WebSocket, Optional[Dict[str, List[str]]]]]: + return websocket, permissions + return _ -async def update_user(): - async def _(data: Dict[str, Any]) -> User: - global USER - user = dict(USER) - user.update(data) - USER = User(**user) - return USER + async def update_user(self): + async def _(data: Dict[str, Any]) -> User: + global USER + user = dict(USER) + user.update(data) + USER = User(**user) + return USER - return _ + return _ diff --git a/plugins/noauth/fps_noauth/main.py b/plugins/noauth/fps_noauth/main.py new file mode 100644 index 00000000..76509c6c --- /dev/null +++ b/plugins/noauth/fps_noauth/main.py @@ -0,0 +1,13 @@ +from asphalt.core import Component, Context +from jupyverse_api.auth import Auth + +from .backends import _NoAuth + + +class NoAuthComponent(Component): + 
async def start( + self, + ctx: Context, + ) -> None: + no_auth = _NoAuth() + ctx.add_resource(no_auth, types=Auth) diff --git a/plugins/noauth/pyproject.toml b/plugins/noauth/pyproject.toml index cd428f5d..4ea3aa39 100644 --- a/plugins/noauth/pyproject.toml +++ b/plugins/noauth/pyproject.toml @@ -5,7 +5,7 @@ build-backend = "hatchling.build" [project] name = "fps_noauth" description = "An FPS plugin for an unprotected API" -keywords = ["jupyter", "server", "fastapi", "pluggy", "plugins"] +keywords = ["jupyter", "server", "fastapi", "plugins"] dynamic = ["version"] requires-python = ">=3.8" @@ -23,17 +23,14 @@ text = "BSD 3-Clause License" [project.urls] Homepage = "https://jupyter.org" +[project.entry-points."asphalt.components"] +noauth = "fps_noauth.main:NoAuthComponent" + [tool.check-manifest] ignore = [ ".*",] [tool.jupyter-releaser] skip = [ "check-links",] -[project.entry-points.jupyverse_auth] -User = "fps_noauth.models:User" -current_user = "fps_noauth.backends:current_user" -update_user = "fps_noauth.backends:update_user" -websocket_auth = "fps_noauth.backends:websocket_auth" - [tool.hatch.version] path = "fps_noauth/__init__.py" diff --git a/plugins/resource_usage/fps_resource_usage/config.py b/plugins/resource_usage/fps_resource_usage/config.py deleted file mode 100644 index 3e75ffae..00000000 --- a/plugins/resource_usage/fps_resource_usage/config.py +++ /dev/null @@ -1,18 +0,0 @@ -from fps.config import PluginModel, get_config # type: ignore -from fps.hooks import register_config # type: ignore -from pydantic import BaseSettings # type: ignore - - -class ResourceUsageConfig(PluginModel, BaseSettings): - mem_limit: int = 0 - mem_warning_threshold: int = 0 - track_cpu_percent: bool = False - cpu_limit: int = 0 - cpu_warning_threshold: int = 0 - - -def get_resource_usage_config(): - return get_config(ResourceUsageConfig) - - -c = register_config(ResourceUsageConfig) diff --git a/plugins/resource_usage/fps_resource_usage/main.py b/plugins/resource_usage/fps_resource_usage/main.py new file mode 100644 index 00000000..8eafa1bd --- /dev/null +++ b/plugins/resource_usage/fps_resource_usage/main.py @@ -0,0 +1,21 @@ +from asphalt.core import Component, Context +from jupyverse_api.auth import Auth +from jupyverse_api.app import App +from jupyverse_api.resource_usage import ResourceUsage, ResourceUsageConfig + +from .routes import _ResourceUsage + + +class ResourceUsageComponent(Component): + def __init__(self, **kwargs): + self.resource_usage_config = ResourceUsageConfig(**kwargs) + + async def start( + self, + ctx: Context, + ) -> None: + app = await ctx.request_resource(App) + auth = await ctx.request_resource(Auth) # type: ignore + + resource_usage = _ResourceUsage(app, auth, self.resource_usage_config) + ctx.add_resource(resource_usage, types=ResourceUsage) diff --git a/plugins/resource_usage/fps_resource_usage/routes.py b/plugins/resource_usage/fps_resource_usage/routes.py index 034b2d3a..b80c3f77 100644 --- a/plugins/resource_usage/fps_resource_usage/routes.py +++ b/plugins/resource_usage/fps_resource_usage/routes.py @@ -1,53 +1,65 @@ -import psutil # type: ignore +import psutil from anyio import to_thread -from fastapi import APIRouter, Depends # type: ignore -from fps.hooks import register_router # type: ignore -from fps_auth_base import User, current_user # type: ignore - -from .config import get_resource_usage_config - -router = APIRouter() - - -@router.get("/api/metrics/v1") -async def get_content( - user: User = Depends(current_user(permissions={"contents": ["read"]})), - 
config=Depends(get_resource_usage_config), -): - cur_process = psutil.Process() - all_processes = [cur_process] + cur_process.children(recursive=True) - - # Get memory information - rss = 0 - for p in all_processes: - try: - rss += p.memory_info().rss - except (psutil.NoSuchProcess, psutil.AccessDenied): - pass - - mem_limit = config.mem_limit +from fastapi import APIRouter, Depends +from jupyverse_api.app import App +from jupyverse_api.auth import Auth, User +from jupyverse_api.resource_usage import ResourceUsage, ResourceUsageConfig + + +class _ResourceUsage(ResourceUsage): + def __init__( + self, + app: App, + auth: Auth, + resource_usage_config: ResourceUsageConfig, + ): + super().__init__(app) + + router = APIRouter() + + @router.get("/api/metrics/v1") + async def get_metrics( + user: User = Depends(auth.current_user(permissions={"contents": ["read"]})), + ): + cur_process = psutil.Process() + all_processes = [cur_process] + cur_process.children(recursive=True) + + # Get memory information + rss = 0 + for p in all_processes: + try: + rss += p.memory_info().rss + except (psutil.NoSuchProcess, psutil.AccessDenied): + pass + + mem_limit = resource_usage_config.mem_limit + + limits = {"memory": {"rss": mem_limit}} + if resource_usage_config.mem_limit and resource_usage_config.mem_warning_threshold: + limits["memory"]["warn"] = (mem_limit - rss) < ( + mem_limit * resource_usage_config.mem_warning_threshold + ) - limits = {"memory": {"rss": mem_limit}} - if config.mem_limit and config.mem_warning_threshold: - limits["memory"]["warn"] = (mem_limit - rss) < (mem_limit * config.mem_warning_threshold) + metrics = {"rss": rss, "limits": limits} - metrics = {"rss": rss, "limits": limits} + # Optionally get CPU information + if resource_usage_config.track_cpu_percent: + cpu_count = psutil.cpu_count() + cpu_percent = await to_thread.run_sync(_get_cpu_percent, all_processes) - # Optionally get CPU information - if config.track_cpu_percent: - cpu_count = psutil.cpu_count() - cpu_percent = await to_thread.run_sync(_get_cpu_percent, all_processes) + if resource_usage_config.cpu_limit: + limits["cpu"] = {"cpu": resource_usage_config.cpu_limit} + if resource_usage_config.cpu_warning_threshold: + limits["cpu"]["warn"] = (resource_usage_config.cpu_limit - cpu_percent) < ( + resource_usage_config.cpu_limit + * resource_usage_config.cpu_warning_threshold + ) - if config.cpu_limit: - limits["cpu"] = {"cpu": config.cpu_limit} - if config.cpu_warning_threshold: - limits["cpu"]["warn"] = (config.cpu_limit - cpu_percent) < ( - config.cpu_limit * config.cpu_warning_threshold - ) + metrics.update(cpu_percent=cpu_percent, cpu_count=cpu_count) - metrics.update(cpu_percent=cpu_percent, cpu_count=cpu_count) + return metrics - return metrics + self.include_router(router) def _get_cpu_percent(all_processes): @@ -60,6 +72,3 @@ def get_cpu_percent(p): return 0 return sum([get_cpu_percent(p) for p in all_processes]) - - -r = register_router(router) diff --git a/plugins/resource_usage/pyproject.toml b/plugins/resource_usage/pyproject.toml index 24cbb81d..126b63a9 100644 --- a/plugins/resource_usage/pyproject.toml +++ b/plugins/resource_usage/pyproject.toml @@ -5,9 +5,13 @@ build-backend = "hatchling.build" [project] name = "fps_resource_usage" description = "An FPS plugin for the resource usage API" -keywords = [ "jupyter", "server", "fastapi", "pluggy", "plugins",] +keywords = ["jupyter", "server", "fastapi", "plugins"] requires-python = ">=3.8" -dependencies = [ "fps >=0.0.8", "psutil >=5.9.4", "anyio >=3.6.2", ] 
+dependencies = [ + "psutil >=5.9.4", + "types-psutil", + "anyio >=3.6.2", +] dynamic = [ "version",] [[project.authors]] name = "Jupyter Development Team" @@ -29,11 +33,8 @@ ignore = [ ".*",] [tool.jupyter-releaser] skip = [ "check-links",] -[project.entry-points.fps_router] -fps-resource-usage = "fps_resource_usage.routes" - -[project.entry-points.fps_config] -fps-resource-usage = "fps_resource_usage.config" +[project.entry-points."asphalt.components"] +resource_usage = "fps_resource_usage.main:ResourceUsageComponent" [tool.hatch.version] path = "fps_resource_usage/__init__.py" diff --git a/plugins/retrolab/fps_retrolab/config.py b/plugins/retrolab/fps_retrolab/config.py deleted file mode 100644 index 7e971c5e..00000000 --- a/plugins/retrolab/fps_retrolab/config.py +++ /dev/null @@ -1,13 +0,0 @@ -from fps.config import PluginModel, get_config # type: ignore -from fps.hooks import register_config # type: ignore - - -class RetroLabConfig(PluginModel): - pass - - -def get_rlab_config(): - return get_config(RetroLabConfig) - - -c = register_config(RetroLabConfig) diff --git a/plugins/retrolab/fps_retrolab/main.py b/plugins/retrolab/fps_retrolab/main.py new file mode 100644 index 00000000..661751b1 --- /dev/null +++ b/plugins/retrolab/fps_retrolab/main.py @@ -0,0 +1,22 @@ +from asphalt.core import Component, Context +from jupyverse_api.app import App +from jupyverse_api.auth import Auth +from jupyverse_api.frontend import FrontendConfig +from jupyverse_api.retrolab import RetroLab +from jupyverse_api.lab import Lab + +from .routes import _RetroLab + + +class RetroLabComponent(Component): + async def start( + self, + ctx: Context, + ) -> None: + app = await ctx.request_resource(App) + auth = await ctx.request_resource(Auth) # type: ignore + frontend_config = await ctx.request_resource(FrontendConfig) + lab = await ctx.request_resource(Lab) # type: ignore + + retrolab = _RetroLab(app, auth, frontend_config, lab) + ctx.add_resource(retrolab, types=RetroLab) diff --git a/plugins/retrolab/fps_retrolab/routes.py b/plugins/retrolab/fps_retrolab/routes.py index af735794..52c02ad3 100644 --- a/plugins/retrolab/fps_retrolab/routes.py +++ b/plugins/retrolab/fps_retrolab/routes.py @@ -5,143 +5,139 @@ from fastapi import APIRouter, Depends from fastapi.responses import HTMLResponse from fastapi.staticfiles import StaticFiles -from fps.hooks import register_router # type: ignore -from fps_auth_base import User, current_user # type: ignore -from fps_frontend.config import get_frontend_config # type: ignore -from fps_lab.config import get_lab_config # type: ignore -from fps_lab.routes import init_router # type: ignore -from fps_lab.utils import get_federated_extensions # type: ignore - -router = APIRouter() -prefix_dir, federated_extensions = init_router(router, "retro/tree") -retrolab_dir = Path(retrolab.__file__).parent - -router.mount( - "/static/retro", - StaticFiles(directory=retrolab_dir / "static"), - name="static", -) - -router.mount( - "/lab/extensions/@retrolab/lab-extension/static", - StaticFiles(directory=retrolab_dir / "labextension" / "static"), - name="labextension/static", -) - -for path in (retrolab_dir / "labextension" / "static").glob("remoteEntry.*.js"): - load = f"static/{path.name}" - break -retro_federated_extensions = [ - { - "extension": "./extension", - "load": load, - "name": "@retrolab/lab-extension", - "style": "./style", - } -] - - -@router.get("/retro/tree", response_class=HTMLResponse) -async def get_tree( - user: User = Depends(current_user()), - 
frontend_config=Depends(get_frontend_config), - lab_config=Depends(get_lab_config), -): - return get_index("Tree", "tree", lab_config.collaborative, frontend_config.base_url) - - -@router.get("/retro/notebooks/{path:path}", response_class=HTMLResponse) -async def get_notebook( - path, - user: User = Depends(current_user()), - frontend_config=Depends(get_frontend_config), - lab_config=Depends(get_lab_config), -): - return get_index(path, "notebooks", lab_config.collaborative, frontend_config.base_url) - - -@router.get("/retro/edit/{path:path}", response_class=HTMLResponse) -async def edit_file( - path, - user: User = Depends(current_user()), - frontend_config=Depends(get_frontend_config), - lab_config=Depends(get_lab_config), -): - return get_index(path, "edit", lab_config.collaborative, frontend_config.base_url) - - -@router.get("/retro/consoles/{path:path}", response_class=HTMLResponse) -async def get_console( - path, - user: User = Depends(current_user()), - frontend_config=Depends(get_frontend_config), - lab_config=Depends(get_lab_config), -): - return get_index(path, "consoles", lab_config.collaborative, frontend_config.base_url) - - -@router.get("/retro/terminals/{name}", response_class=HTMLResponse) -async def get_terminal( - name: str, - user: User = Depends(current_user()), - frontend_config=Depends(get_frontend_config), - lab_config=Depends(get_lab_config), -): - return get_index(name, "terminals", lab_config.collaborative, frontend_config.base_url) - - -def get_index(doc_name, retro_page, collaborative, base_url="/"): - extensions_dir = prefix_dir / "share" / "jupyter" / "labextensions" - federated_extensions, disabled_extension = get_federated_extensions(extensions_dir) - page_config = { - "appName": "RetroLab", - "appNamespace": "retro", - "appSettingsDir": (prefix_dir / "share" / "jupyter" / "lab" / "settings").as_posix(), - "appUrl": "/lab", - "appVersion": retrolab.__version__, - "baseUrl": base_url, - "cacheFiles": True, - "collaborative": collaborative, - "disabledExtensions": disabled_extension, - "extraLabextensionsPath": [], - "federated_extensions": retro_federated_extensions + federated_extensions, - "frontendUrl": "/retro/", - "fullAppUrl": f"{base_url}lab", - "fullLabextensionsUrl": f"{base_url}lab/extensions", - "fullLicensesUrl": f"{base_url}lab/api/licenses", - "fullListingsUrl": f"{base_url}lab/api/listings", - "fullMathjaxUrl": f"{base_url}static/notebook/components/MathJax/MathJax.js", - "fullSettingsUrl": f"{base_url}lab/api/settings", - "fullStaticUrl": f"{base_url}static/retro", - "fullThemesUrl": f"{base_url}lab/api/themes", - "fullTranslationsApiUrl": f"{base_url}lab/api/translations", - "fullTreeUrl": f"{base_url}lab/tree", - "fullWorkspacesApiUrl": f"{base_url}lab/api/workspaces", - "labextensionsPath": [(prefix_dir / "share" / "jupyter" / "labextensions").as_posix()], - "labextensionsUrl": "/lab/extensions", - "licensesUrl": "/lab/api/licenses", - "listingsUrl": "/lab/api/listings", - "mathjaxConfig": "TeX-AMS-MML_HTMLorMML-full,Safe", - "retroLogo": False, - "retroPage": retro_page, - "schemasDir": (prefix_dir / "share" / "jupyter" / "lab" / "schemas").as_posix(), - "settingsUrl": "/lab/api/settings", - "staticDir": (retrolab_dir / "static").as_posix(), - "templatesDir": (retrolab_dir / "templates").as_posix(), - "terminalsAvailable": True, - "themesDir": (prefix_dir / "share" / "jupyter" / "lab" / "themes").as_posix(), - "themesUrl": "/lab/api/themes", - "translationsApiUrl": "/lab/api/translations", - "treeUrl": "/lab/tree", - "workspacesApiUrl": 
"/lab/api/workspaces", - "wsUrl": "", - } - index = ( - INDEX_HTML.replace("PAGE_CONFIG", json.dumps(page_config)) - .replace("DOC_NAME", doc_name) - .replace("BASE_URL", base_url) - ) - return index +from jupyverse_api.app import App +from jupyverse_api.auth import Auth, User +from jupyverse_api.frontend import FrontendConfig +from jupyverse_api.lab import Lab +from jupyverse_api.retrolab import RetroLab + + +class _RetroLab(RetroLab): + def __init__(self, app: App, auth: Auth, frontend_config: FrontendConfig, lab: Lab) -> None: + super().__init__(app) + + router = APIRouter() + prefix_dir, federated_extensions = lab.init_router(router, "retro/tree") + retrolab_dir = Path(retrolab.__file__).parent + + self.mount( + "/static/retro", + StaticFiles(directory=retrolab_dir / "static"), + name="static", + ) + + for path in (retrolab_dir / "labextension" / "static").glob("remoteEntry.*.js"): + load = f"static/{path.name}" + break + retro_federated_extensions = [ + { + "extension": "./extension", + "load": load, + "name": "@retrolab/lab-extension", + "style": "./style", + } + ] + + @router.get("/retro/tree", response_class=HTMLResponse) + async def get_tree( + user: User = Depends(auth.current_user()), + ): + return get_index( + "Tree", "tree", frontend_config.collaborative, frontend_config.base_url + ) + + @router.get("/retro/notebooks/{path:path}", response_class=HTMLResponse) + async def get_notebook( + path, + user: User = Depends(auth.current_user()), + ): + return get_index( + path, "notebooks", frontend_config.collaborative, frontend_config.base_url + ) + + @router.get("/retro/edit/{path:path}", response_class=HTMLResponse) + async def edit_file( + path, + user: User = Depends(auth.current_user()), + ): + return get_index(path, "edit", frontend_config.collaborative, frontend_config.base_url) + + @router.get("/retro/consoles/{path:path}", response_class=HTMLResponse) + async def get_console( + path, + user: User = Depends(auth.current_user()), + ): + return get_index( + path, "consoles", frontend_config.collaborative, frontend_config.base_url + ) + + @router.get("/retro/terminals/{name}", response_class=HTMLResponse) + async def get_terminal( + name: str, + user: User = Depends(auth.current_user()), + ): + return get_index( + name, "terminals", frontend_config.collaborative, frontend_config.base_url + ) + + def get_index(doc_name, retro_page, collaborative, base_url="/"): + extensions_dir = prefix_dir / "share" / "jupyter" / "labextensions" + federated_extensions, disabled_extension = lab.get_federated_extensions(extensions_dir) + page_config = { + "appName": "RetroLab", + "appNamespace": "retro", + "appSettingsDir": ( + prefix_dir / "share" / "jupyter" / "lab" / "settings" + ).as_posix(), + "appUrl": "/lab", + "appVersion": retrolab.__version__, + "baseUrl": base_url, + "cacheFiles": True, + "collaborative": collaborative, + "disabledExtensions": disabled_extension, + "extraLabextensionsPath": [], + "federated_extensions": retro_federated_extensions + federated_extensions, + "frontendUrl": "/retro/", + "fullAppUrl": f"{base_url}lab", + "fullLabextensionsUrl": f"{base_url}lab/extensions", + "fullLicensesUrl": f"{base_url}lab/api/licenses", + "fullListingsUrl": f"{base_url}lab/api/listings", + "fullMathjaxUrl": f"{base_url}static/notebook/components/MathJax/MathJax.js", + "fullSettingsUrl": f"{base_url}lab/api/settings", + "fullStaticUrl": f"{base_url}static/retro", + "fullThemesUrl": f"{base_url}lab/api/themes", + "fullTranslationsApiUrl": f"{base_url}lab/api/translations", + "fullTreeUrl": 
f"{base_url}lab/tree", + "fullWorkspacesApiUrl": f"{base_url}lab/api/workspaces", + "labextensionsPath": [ + (prefix_dir / "share" / "jupyter" / "labextensions").as_posix() + ], + "labextensionsUrl": "/lab/extensions", + "licensesUrl": "/lab/api/licenses", + "listingsUrl": "/lab/api/listings", + "mathjaxConfig": "TeX-AMS-MML_HTMLorMML-full,Safe", + "retroLogo": False, + "retroPage": retro_page, + "schemasDir": (prefix_dir / "share" / "jupyter" / "lab" / "schemas").as_posix(), + "settingsUrl": "/lab/api/settings", + "staticDir": (retrolab_dir / "static").as_posix(), + "templatesDir": (retrolab_dir / "templates").as_posix(), + "terminalsAvailable": True, + "themesDir": (prefix_dir / "share" / "jupyter" / "lab" / "themes").as_posix(), + "themesUrl": "/lab/api/themes", + "translationsApiUrl": "/lab/api/translations", + "treeUrl": "/lab/tree", + "workspacesApiUrl": "/lab/api/workspaces", + "wsUrl": "", + } + index = ( + INDEX_HTML.replace("PAGE_CONFIG", json.dumps(page_config)) + .replace("DOC_NAME", doc_name) + .replace("BASE_URL", base_url) + ) + return index + + self.include_router(router) INDEX_HTML = """\ @@ -172,5 +168,3 @@ def get_index(doc_name, retro_page, collaborative, base_url="/"): """ - -r = register_router(router) diff --git a/plugins/retrolab/pyproject.toml b/plugins/retrolab/pyproject.toml index 38ad5ac2..ca713e9d 100644 --- a/plugins/retrolab/pyproject.toml +++ b/plugins/retrolab/pyproject.toml @@ -5,9 +5,11 @@ build-backend = "hatchling.build" [project] name = "fps_retrolab" description = "An FPS plugin for the RetroLab API" -keywords = [ "jupyter", "server", "fastapi", "pluggy", "plugins",] +keywords = ["jupyter", "server", "fastapi", "plugins"] requires-python = ">=3.8" -dependencies = [ "fps >=0.0.8", "fps-auth-base", "fps-frontend", "fps-lab", "retrolab",] +dependencies = [ + "retrolab", +] dynamic = [ "version",] [[project.authors]] name = "Jupyter Development Team" @@ -29,11 +31,8 @@ ignore = [ ".*",] [tool.jupyter-releaser] skip = [ "check-links",] -[project.entry-points.fps_router] -fps-retrolab = "fps_retrolab.routes" - -[project.entry-points.fps_config] -fps-retrolab = "fps_retrolab.config" +[project.entry-points."asphalt.components"] +retrolab = "fps_retrolab.main:RetroLabComponent" [tool.hatch.version] path = "fps_retrolab/__init__.py" diff --git a/plugins/terminals/fps_terminals/main.py b/plugins/terminals/fps_terminals/main.py new file mode 100644 index 00000000..1ec474aa --- /dev/null +++ b/plugins/terminals/fps_terminals/main.py @@ -0,0 +1,27 @@ +import os +from typing import Type + +from asphalt.core import Component, Context +from jupyverse_api.auth import Auth +from jupyverse_api.terminals import Terminals, TerminalServer +from jupyverse_api.app import App + +from .routes import _Terminals + +_TerminalServer: Type[TerminalServer] +if os.name == "nt": + from .win_server import _TerminalServer +else: + from .server import _TerminalServer + + +class TerminalsComponent(Component): + async def start( + self, + ctx: Context, + ) -> None: + app = await ctx.request_resource(App) + auth = await ctx.request_resource(Auth) # type: ignore + + terminals = _Terminals(app, auth, _TerminalServer) + ctx.add_resource(terminals, types=Terminals) diff --git a/plugins/terminals/fps_terminals/routes.py b/plugins/terminals/fps_terminals/routes.py index 5ddbe89a..5856a181 100644 --- a/plugins/terminals/fps_terminals/routes.py +++ b/plugins/terminals/fps_terminals/routes.py @@ -1,70 +1,66 @@ -import os from datetime import datetime from http import HTTPStatus -from typing import 
Any, Dict +from typing import Any, Dict, Type from fastapi import APIRouter, Depends, Response -from fps.hooks import register_router # type: ignore -from fps_auth_base import User, current_user, websocket_auth # type: ignore +from jupyverse_api.app import App +from jupyverse_api.auth import Auth, User +from jupyverse_api.terminals import Terminals, TerminalServer from .models import Terminal -if os.name == "nt": - from .win_server import TerminalServer # type: ignore -else: - from .server import TerminalServer # type: ignore - -router = APIRouter() - TERMINALS: Dict[str, Dict[str, Any]] = {} -@router.get("/api/terminals") -async def get_terminals(user: User = Depends(current_user({"terminals": ["read"]}))): - return [terminal["info"] for terminal in TERMINALS.values()] - - -@router.post("/api/terminals") -async def create_terminal( - user: User = Depends(current_user({"terminals": ["write"]})), -): - name = str(len(TERMINALS) + 1) - terminal = Terminal( - **{ - "name": name, - "last_activity": datetime.utcnow().isoformat() + "Z", - } - ) - server = TerminalServer() - TERMINALS[name] = {"info": terminal, "server": server} - return terminal - +class _Terminals(Terminals): + def __init__(self, app: App, auth: Auth, _TerminalServer: Type[TerminalServer]) -> None: + super().__init__(app) + router = APIRouter() -@router.delete("/api/terminals/{name}", status_code=204) -async def delete_terminal( - name: str, - user: User = Depends(current_user(permissions={"terminals": ["write"]})), -): - for websocket in TERMINALS[name]["server"].websockets: - TERMINALS[name]["server"].quit(websocket) - del TERMINALS[name] - return Response(status_code=HTTPStatus.NO_CONTENT.value) + @router.get("/api/terminals") + async def get_terminals( + user: User = Depends(auth.current_user({"terminals": ["read"]})), + ): + return [terminal["info"] for terminal in TERMINALS.values()] + @router.post("/api/terminals") + async def create_terminal( + user: User = Depends(auth.current_user({"terminals": ["write"]})), + ): + name = str(len(TERMINALS) + 1) + terminal = Terminal( + name=name, + last_activity=datetime.utcnow().isoformat() + "Z", + ) + server = _TerminalServer() + TERMINALS[name] = {"info": terminal, "server": server} + return terminal -@router.websocket("/terminals/websocket/{name}") -async def terminal_websocket( - name, - websocket_permissions=Depends(websocket_auth(permissions={"terminals": ["read", "execute"]})), -): - if websocket_permissions is None: - return - websocket, permissions = websocket_permissions - await websocket.accept() - await TERMINALS[name]["server"].serve(websocket, permissions) - if name in TERMINALS: - TERMINALS[name]["server"].quit(websocket) - if not TERMINALS[name]["server"].websockets: + @router.delete("/api/terminals/{name}", status_code=204) + async def delete_terminal( + name: str, + user: User = Depends(auth.current_user(permissions={"terminals": ["write"]})), + ): + for websocket in TERMINALS[name]["server"].websockets: + TERMINALS[name]["server"].quit(websocket) del TERMINALS[name] + return Response(status_code=HTTPStatus.NO_CONTENT.value) + @router.websocket("/terminals/websocket/{name}") + async def terminal_websocket( + name, + websocket_permissions=Depends( + auth.websocket_auth(permissions={"terminals": ["read", "execute"]}) + ), + ): + if websocket_permissions is None: + return + websocket, permissions = websocket_permissions + await websocket.accept() + await TERMINALS[name]["server"].serve(websocket, permissions) + if name in TERMINALS: + 
TERMINALS[name]["server"].quit(websocket) + if not TERMINALS[name]["server"].websockets: + del TERMINALS[name] -r = register_router(router) + self.include_router(router) diff --git a/plugins/terminals/fps_terminals/server.py b/plugins/terminals/fps_terminals/server.py index 43d33eac..331c3fed 100644 --- a/plugins/terminals/fps_terminals/server.py +++ b/plugins/terminals/fps_terminals/server.py @@ -7,6 +7,7 @@ import termios from fastapi import WebSocketDisconnect +from jupyverse_api.terminals import TerminalServer def open_terminal(command="bash", columns=80, lines=24): @@ -19,7 +20,7 @@ def open_terminal(command="bash", columns=80, lines=24): return fd -class TerminalServer: +class _TerminalServer(TerminalServer): def __init__(self): self.fd = open_terminal() self.p_out = os.fdopen(self.fd, "w+b", 0) diff --git a/plugins/terminals/fps_terminals/win_server.py b/plugins/terminals/fps_terminals/win_server.py index 71d556c2..9aa3e259 100644 --- a/plugins/terminals/fps_terminals/win_server.py +++ b/plugins/terminals/fps_terminals/win_server.py @@ -1,6 +1,7 @@ import asyncio import os +from jupyverse_api.terminals import TerminalServer from winpty import PTY # type: ignore @@ -11,7 +12,7 @@ def open_terminal(command="C:\\Windows\\System32\\cmd.exe", columns=80, lines=24 return process -class TerminalServer: +class _TerminalServer(TerminalServer): def __init__(self): self.process = open_terminal() self.websockets = [] diff --git a/plugins/terminals/pyproject.toml b/plugins/terminals/pyproject.toml index 18624265..ae5541ed 100644 --- a/plugins/terminals/pyproject.toml +++ b/plugins/terminals/pyproject.toml @@ -5,10 +5,13 @@ build-backend = "hatchling.build" [project] name = "fps_terminals" description = "An FPS plugin for the terminals API" -keywords = [ "jupyter", "server", "fastapi", "pluggy", "plugins",] +keywords = ["jupyter", "server", "fastapi", "plugins"] requires-python = ">=3.8" -dependencies = [ "fps >=0.0.8", "fps-auth-base", "websockets", "pywinpty;platform_system=='Windows'",] -dynamic = [ "version",] +dependencies = [ + "websockets", + "pywinpty;platform_system=='Windows'", +] +dynamic = ["version"] [[project.authors]] name = "Jupyter Development Team" email = "jupyter@googlegroups.com" @@ -29,8 +32,8 @@ ignore = [ ".*",] [tool.jupyter-releaser] skip = [ "check-links",] -[project.entry-points.fps_router] -fps-terminals = "fps_terminals.routes" +[project.entry-points."asphalt.components"] +terminals = "fps_terminals.main:TerminalsComponent" [tool.hatch.version] path = "fps_terminals/__init__.py" diff --git a/plugins/yjs/fps_yjs/main.py b/plugins/yjs/fps_yjs/main.py new file mode 100644 index 00000000..7cbd9685 --- /dev/null +++ b/plugins/yjs/fps_yjs/main.py @@ -0,0 +1,33 @@ +from __future__ import annotations +from collections.abc import AsyncGenerator +from typing import Optional + +from asphalt.core import Component, Context, context_teardown +from jupyverse_api.app import App +from jupyverse_api.auth import Auth +from jupyverse_api.contents import Contents +from jupyverse_api.yjs import Yjs + +from .routes import _Yjs + + +class YjsComponent(Component): + @context_teardown + async def start( + self, + ctx: Context, + ) -> AsyncGenerator[None, Optional[BaseException]]: + app = await ctx.request_resource(App) + auth = await ctx.request_resource(Auth) # type: ignore + contents = await ctx.request_resource(Contents) # type: ignore + + yjs = _Yjs(app, auth, contents) + ctx.add_resource(yjs, types=Yjs) + + # start indexing in the background + contents.file_id_manager + + yield + + 
contents.file_id_manager.stop_watching_files.set() + await contents.file_id_manager.stopped_watching_files.wait() diff --git a/plugins/yjs/fps_yjs/models.py b/plugins/yjs/fps_yjs/models.py index 1fa3b71c..0fe8aab6 100644 --- a/plugins/yjs/fps_yjs/models.py +++ b/plugins/yjs/fps_yjs/models.py @@ -1,6 +1,6 @@ from pydantic import BaseModel -class CreateRoomId(BaseModel): +class CreateDocumentSession(BaseModel): format: str type: str diff --git a/plugins/yjs/fps_yjs/py.typed b/plugins/yjs/fps_yjs/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/plugins/yjs/fps_yjs/routes.py b/plugins/yjs/fps_yjs/routes.py index 3e63aba7..a48d40c0 100644 --- a/plugins/yjs/fps_yjs/routes.py +++ b/plugins/yjs/fps_yjs/routes.py @@ -2,6 +2,7 @@ from datetime import datetime from pathlib import Path from typing import Optional, Tuple +from uuid import uuid4 from fastapi import ( APIRouter, @@ -12,52 +13,284 @@ WebSocketDisconnect, status, ) -from fastapi.responses import PlainTextResponse -from fps.hooks import register_router # type: ignore -from fps_auth_base import websocket_auth # type: ignore -from fps_auth_base import User, current_user -from fps_contents.fileid import FileIdManager -from fps_contents.routes import read_content, write_content # type: ignore from jupyter_ydoc import ydocs as YDOCS # type: ignore -from ypy_websocket.websocket_server import WebsocketServer, YRoom # type: ignore -from ypy_websocket.ystore import BaseYStore, SQLiteYStore, YDocNotFound # type: ignore -from ypy_websocket.yutils import YMessageType, YSyncMessageType # type: ignore +from jupyverse_api.app import App +from jupyverse_api.auth import Auth, User +from jupyverse_api.contents import Contents +from jupyverse_api.yjs import Yjs +from ypy_websocket.websocket_server import WebsocketServer, YRoom +from ypy_websocket.ystore import BaseYStore, SQLiteYStore, YDocNotFound +from ypy_websocket.yutils import YMessageType, YSyncMessageType -from .models import CreateRoomId +from .models import CreateDocumentSession YFILE = YDOCS["file"] AWARENESS = 1 RENAME_SESSION = 127 +SERVER_SESSION = uuid4().hex class JupyterSQLiteYStore(SQLiteYStore): db_path = ".jupyter_ystore.db" -router = APIRouter() +class _Yjs(Yjs): + def __init__( + self, + app: App, + auth: Auth, + contents: Contents, + ) -> None: + super().__init__(app) + + class YDocWebSocketHandler: + saving_document: Optional[asyncio.Task] + websocket_server = JupyterWebsocketServer(rooms_ready=False, auto_clean_rooms=False) + + def __init__(self, websocket, path, permissions): + self.websocket = websocket + self.can_write = permissions is None or "write" in permissions.get("yjs", []) + self.lock = asyncio.Lock() + self.room = self.websocket_server.get_room(self.websocket.path) + self.set_file_info(path) + + async def get_file_info(self) -> Tuple[str, str, str]: + room_name = self.websocket_server.get_room_name(self.room) + file_format, file_type, file_id = room_name.split(":", 2) + file_path = await contents.file_id_manager.get_path(file_id) + if file_path is None: + raise RuntimeError(f"File {self.room.document.path} cannot be found anymore") + if file_path != self.room.document.path: + self.room.document.path = file_path + return file_format, file_type, file_path + + def set_file_info(self, value: str) -> None: + self.websocket_server.rename_room(value, from_room=self.room) + self.websocket.path = value + + async def serve(self): + self.set_file_info(self.websocket.path) + self.saving_document = None + self.room.on_message = self.on_message + + # cancel the 
deletion of the room if it was scheduled + if not self.room.is_transient and self.room.cleaner is not None: + self.room.cleaner.cancel() + + if not self.room.is_transient and not self.room.ready: + file_format, file_type, file_path = await self.get_file_info() + is_notebook = file_type == "notebook" + model = await contents.read_content(file_path, True, as_json=is_notebook) + self.last_modified = to_datetime(model.last_modified) + # check again if ready, because loading the file is async + if not self.room.ready: + # try to apply Y updates from the YStore for this document + try: + await self.room.ystore.apply_updates(self.room.ydoc) + read_from_source = False + except YDocNotFound: + # YDoc not found in the YStore, create the document from + # the source file (no change history) + read_from_source = True + if not read_from_source: + # if YStore updates and source file are out-of-sync, resync updates + # with source + if self.room.document.source != model.content: + read_from_source = True + if read_from_source: + self.room.document.source = model.content + await self.room.ystore.encode_state_as_update(self.room.ydoc) + + self.room.document.dirty = False + self.room.ready = True + self.room.watcher = asyncio.create_task(self.watch_file()) + # save the document when changed + self.room.document.observe(self.on_document_change) + + await self.websocket_server.serve(self.websocket) + if not self.room.is_transient and not self.room.clients: + # no client in this room after we disconnect + # keep the document for a while in case someone reconnects + self.room.cleaner = asyncio.create_task(self.clean_room()) + + async def on_message(self, message: bytes) -> bool: + """ + Called whenever a message is received, before forwarding it to other clients. + + :param message: received message + :returns: True if the message must be discarded, False otherwise (default: False). + """ + skip = False + byte = message[0] + msg = message[1:] + if byte == RENAME_SESSION: + # The client moved the document to a different location. After receiving this + # message, we make the current document available under a different url. + # The other clients are automatically notified of this change because + # the path is shared through the Yjs document as well. 
+ new_room_name = msg.decode("utf-8") + self.set_file_info(new_room_name) + self.websocket_server.rename_room(new_room_name, from_room=self.room) + # send rename acknowledge + await self.websocket.send(bytes([RENAME_SESSION, 1])) + elif byte == AWARENESS: + # changes = self.room.awareness.get_changes(msg) + # # filter out message depending on changes + # skip = True + pass + elif byte == YMessageType.SYNC: + if not self.can_write and msg[0] == YSyncMessageType.SYNC_UPDATE: + skip = True + else: + skip = True + return skip + + async def watch_file(self): + file_format, file_type, file_path = await self.get_file_info() + while True: + watcher = contents.file_id_manager.watch(file_path) + async for changes in watcher: + file_format, file_type, new_file_path = await self.get_file_info() + if new_file_path != file_path: + # file was renamed + contents.file_id_manager.unwatch(file_path, watcher) + file_path = new_file_path + break + await self.maybe_load_document() + + async def maybe_load_document(self): + file_format, file_type, file_path = await self.get_file_info() + async with self.lock: + model = await contents.read_content(file_path, False) + # do nothing if the file was saved by us + if self.last_modified < to_datetime(model.last_modified): + is_notebook = file_type == "notebook" + model = await contents.read_content(file_path, True, as_json=is_notebook) + self.room.document.source = model.content + self.last_modified = to_datetime(model.last_modified) + + async def clean_room(self) -> None: + await asyncio.sleep(60) # FIXME: pass in config + if self.room.watcher: + self.room.watcher.cancel() + self.room.document.unobserve() + self.websocket_server.delete_room(room=self.room) + + def on_document_change(self, target, event): + if target == "state" and "dirty" in event.keys: + dirty = event.keys["dirty"]["newValue"] + if not dirty: + # we cleared the dirty flag, nothing to save + return + # unobserve and observe again because the structure of the document may have changed + # e.g. a new cell added to a notebook + self.room.document.unobserve() + self.room.document.observe(self.on_document_change) + if self.saving_document is not None and not self.saving_document.done(): + # the document is being saved, cancel that + self.saving_document.cancel() + self.saving_document = None + self.saving_document = asyncio.create_task(self.maybe_save_document()) + + async def maybe_save_document(self): + # save after 1 second of inactivity to prevent too frequent saving + await asyncio.sleep(1) + # if the room cannot be found, don't save + try: + file_format, file_type, file_path = await self.get_file_info() + except Exception: + return + is_notebook = file_type == "notebook" + async with self.lock: + model = await contents.read_content(file_path, True, as_json=is_notebook) + if self.last_modified < to_datetime(model.last_modified): + # file changed on disk, let's revert + self.room.document.source = model.content + self.last_modified = to_datetime(model.last_modified) + return + if model.content != self.room.document.source: + # don't save if not needed + # this also prevents the dirty flag from bouncing between windows of + # the same document opened as different types (e.g. 
notebook/text editor) + format = "json" if file_type == "notebook" else "text" + content = { + "content": self.room.document.source, + "format": format, + "path": file_path, + "type": file_type, + } + async with self.lock: + await contents.write_content(content) + model = await contents.read_content(file_path, False) + self.last_modified = to_datetime(model.last_modified) + self.room.document.dirty = False + router = APIRouter() -def to_datetime(iso_date: str) -> datetime: - return datetime.fromisoformat(iso_date.rstrip("Z")) + async def serve_room(websocket_permissions, path): + if websocket_permissions is None: + return + websocket, permissions = websocket_permissions + await websocket.accept() + socket = YDocWebSocketHandler(WebsocketAdapter(websocket, path), path, permissions) + await socket.serve() + + @router.websocket("/api/yjs/{path:path}") + async def yjs_websocket( + path, + websocket_permissions=Depends( + auth.websocket_auth(permissions={"yjs": ["read", "write"]}) + ), + ): + await serve_room(websocket_permissions, path) + + @router.websocket("/api/collaboration/room/{path:path}") + async def collaboration_room_websocket( + path, + websocket_permissions=Depends( + auth.websocket_auth(permissions={"yjs": ["read", "write"]}) + ), + ): + await serve_room(websocket_permissions, path) + + @router.put("/api/collaboration/session/{path:path}", status_code=200) + async def create_roomid( + path, + request: Request, + response: Response, + user: User = Depends(auth.current_user(permissions={"contents": ["read"]})), + ): + # we need to process the request manually + # see https://github.com/tiangolo/fastapi/issues/3373#issuecomment-1306003451 + create_document_session = CreateDocumentSession(**(await request.json())) + idx = await contents.file_id_manager.get_id(path) + if idx is not None: + return { + "format": create_document_session.format, + "type": create_document_session.type, + "fileId": idx, + "sessionId": SERVER_SESSION, + } + + idx = await contents.file_id_manager.index(path) + if idx is None: + raise HTTPException(status_code=404, detail=f"File {path} does not exist") + + response.status_code = status.HTTP_201_CREATED + return { + "format": create_document_session.format, + "type": create_document_session.type, + "fileId": idx, + "sessionId": SERVER_SESSION, + } + self.include_router(router) -@router.on_event("startup") -async def startup(): - # start indexing in the background - FileIdManager() + self.YDocWebSocketHandler = YDocWebSocketHandler -@router.websocket("/api/yjs/{path:path}") -async def websocket_endpoint( - path, - websocket_permissions=Depends(websocket_auth(permissions={"yjs": ["read", "write"]})), -): - if websocket_permissions is None: - return - websocket, permissions = websocket_permissions - await websocket.accept() - socket = YDocWebSocketHandler(WebsocketAdapter(websocket, path), path, permissions) - await socket.serve() +def to_datetime(iso_date: str) -> datetime: + return datetime.fromisoformat(iso_date.rstrip("Z")) class WebsocketAdapter: @@ -125,211 +358,3 @@ def get_room(self, path: str) -> YRoom: # it is a transient document (e.g. 
awareness) self.rooms[path] = TransientRoom() return self.rooms[path] - - -class YDocWebSocketHandler: - saving_document: Optional[asyncio.Task] - websocket_server = JupyterWebsocketServer(rooms_ready=False, auto_clean_rooms=False) - - def __init__(self, websocket, path, permissions): - self.websocket = websocket - self.can_write = permissions is None or "write" in permissions.get("yjs", []) - self.lock = asyncio.Lock() - self.room = self.websocket_server.get_room(self.websocket.path) - self.set_file_info(path) - - async def get_file_info(self) -> Tuple[str, str, str]: - room_name = self.websocket_server.get_room_name(self.room) - file_format, file_type, file_id = room_name.split(":", 2) - file_path = await FileIdManager().get_path(file_id) - if file_path is None: - raise RuntimeError(f"File {self.room.document.path} cannot be found anymore") - if file_path != self.room.document.path: - self.room.document.path = file_path - return file_format, file_type, file_path - - def set_file_info(self, value: str) -> None: - self.websocket_server.rename_room(value, from_room=self.room) - self.websocket.path = value - - async def serve(self): - self.set_file_info(self.websocket.path) - self.saving_document = None - self.room.on_message = self.on_message - - # cancel the deletion of the room if it was scheduled - if not self.room.is_transient and self.room.cleaner is not None: - self.room.cleaner.cancel() - - if not self.room.is_transient and not self.room.ready: - file_format, file_type, file_path = await self.get_file_info() - is_notebook = file_type == "notebook" - model = await read_content(file_path, True, as_json=is_notebook) - self.last_modified = to_datetime(model.last_modified) - # check again if ready, because loading the file is async - if not self.room.ready: - # try to apply Y updates from the YStore for this document - try: - await self.room.ystore.apply_updates(self.room.ydoc) - read_from_source = False - except YDocNotFound: - # YDoc not found in the YStore, create the document from - # the source file (no change history) - read_from_source = True - if not read_from_source: - # if YStore updates and source file are out-of-sync, resync updates with source - if self.room.document.source != model.content: - read_from_source = True - if read_from_source: - self.room.document.source = model.content - await self.room.ystore.encode_state_as_update(self.room.ydoc) - - self.room.document.dirty = False - self.room.ready = True - self.room.watcher = asyncio.create_task(self.watch_file()) - # save the document when changed - self.room.document.observe(self.on_document_change) - - await self.websocket_server.serve(self.websocket) - if not self.room.is_transient and not self.room.clients: - # no client in this room after we disconnect - # keep the document for a while in case someone reconnects - self.room.cleaner = asyncio.create_task(self.clean_room()) - - async def on_message(self, message: bytes) -> bool: - """ - Called whenever a message is received, before forwarding it to other clients. - - :param message: received message - :returns: True if the message must be discarded, False otherwise (default: False). - """ - skip = False - byte = message[0] - msg = message[1:] - if byte == RENAME_SESSION: - # The client moved the document to a different location. After receiving this message, - # we make the current document available under a different url. - # The other clients are automatically notified of this change because - # the path is shared through the Yjs document as well. 
- new_room_name = msg.decode("utf-8") - self.set_file_info(new_room_name) - self.websocket_server.rename_room(new_room_name, from_room=self.room) - # send rename acknowledge - await self.websocket.send(bytes([RENAME_SESSION, 1])) - elif byte == AWARENESS: - # changes = self.room.awareness.get_changes(msg) - # # filter out message depending on changes - # skip = True - pass - elif byte == YMessageType.SYNC: - if not self.can_write and msg[0] == YSyncMessageType.SYNC_UPDATE: - skip = True - else: - skip = True - return skip - - async def watch_file(self): - file_format, file_type, file_path = await self.get_file_info() - while True: - watcher = FileIdManager().watch(file_path) - async for changes in watcher: - file_format, file_type, new_file_path = await self.get_file_info() - if new_file_path != file_path: - # file was renamed - FileIdManager().unwatch(file_path, watcher) - file_path = new_file_path - break - await self.maybe_load_document() - - async def maybe_load_document(self): - file_format, file_type, file_path = await self.get_file_info() - async with self.lock: - model = await read_content(file_path, False) - # do nothing if the file was saved by us - if self.last_modified < to_datetime(model.last_modified): - is_notebook = file_type == "notebook" - model = await read_content(file_path, True, as_json=is_notebook) - self.room.document.source = model.content - self.last_modified = to_datetime(model.last_modified) - - async def clean_room(self) -> None: - await asyncio.sleep(60) # FIXME: pass in config - if self.room.watcher: - self.room.watcher.cancel() - self.room.document.unobserve() - self.websocket_server.delete_room(room=self.room) - - def on_document_change(self, target, event): - if target == "state" and "dirty" in event.keys: - dirty = event.keys["dirty"]["newValue"] - if not dirty: - # we cleared the dirty flag, nothing to save - return - # unobserve and observe again because the structure of the document may have changed - # e.g. a new cell added to a notebook - self.room.document.unobserve() - self.room.document.observe(self.on_document_change) - if self.saving_document is not None and not self.saving_document.done(): - # the document is being saved, cancel that - self.saving_document.cancel() - self.saving_document = None - self.saving_document = asyncio.create_task(self.maybe_save_document()) - - async def maybe_save_document(self): - # save after 1 second of inactivity to prevent too frequent saving - await asyncio.sleep(1) - # if the room cannot be found, don't save - try: - file_format, file_type, file_path = await self.get_file_info() - except Exception: - return - is_notebook = file_type == "notebook" - async with self.lock: - model = await read_content(file_path, True, as_json=is_notebook) - if self.last_modified < to_datetime(model.last_modified): - # file changed on disk, let's revert - self.room.document.source = model.content - self.last_modified = to_datetime(model.last_modified) - return - if model.content != self.room.document.source: - # don't save if not needed - # this also prevents the dirty flag from bouncing between windows of - # the same document opened as different types (e.g. 
notebook/text editor) - format = "json" if file_type == "notebook" else "text" - content = { - "content": self.room.document.source, - "format": format, - "path": file_path, - "type": file_type, - } - async with self.lock: - await write_content(content) - model = await read_content(file_path, False) - self.last_modified = to_datetime(model.last_modified) - self.room.document.dirty = False - - -@router.put("/api/yjs/roomid/{path:path}", status_code=200, response_class=PlainTextResponse) -async def create_roomid( - path, - request: Request, - response: Response, - user: User = Depends(current_user(permissions={"contents": ["read"]})), -): - # we need to process the request manually - # see https://github.com/tiangolo/fastapi/issues/3373#issuecomment-1306003451 - create_room_id = CreateRoomId(**(await request.json())) - ws_url = f"{create_room_id.format}:{create_room_id.type}:" - idx = await FileIdManager().get_id(path) - if idx is not None: - return ws_url + idx - - idx = await FileIdManager().index(path) - if idx is None: - raise HTTPException(status_code=404, detail=f"File {path} does not exist") - - response.status_code = status.HTTP_201_CREATED - return ws_url + idx - - -r = register_router(router) diff --git a/plugins/yjs/pyproject.toml b/plugins/yjs/pyproject.toml index a4c247b3..15de9d58 100644 --- a/plugins/yjs/pyproject.toml +++ b/plugins/yjs/pyproject.toml @@ -5,15 +5,12 @@ build-backend = "hatchling.build" [project] name = "fps_yjs" description = "An FPS plugin for the Yjs API" -keywords = [ "jupyter", "server", "fastapi", "pluggy", "plugins",] +keywords = [ "jupyter", "server", "fastapi", "plugins" ] requires-python = ">=3.8" dependencies = [ - "fps >=0.0.8", - "fps-auth-base", - "fps-contents", - "jupyter_ydoc >=0.3.0,<0.4.0", + "jupyter_ydoc >=0.3.4,<0.4.0", "ypy-websocket >=0.8.2,<1", - "y-py >=0.5.9,<0.6.0", + "y-py >=0.6.0,<0.7.0", ] dynamic = [ "version",] [[project.authors]] @@ -36,8 +33,8 @@ ignore = [ ".*",] [tool.jupyter-releaser] skip = [ "check-links",] -[project.entry-points.fps_router] -fps-yjs = "fps_yjs.routes" +[project.entry-points."asphalt.components"] +yjs = "fps_yjs.main:YjsComponent" [tool.hatch.version] path = "fps_yjs/__init__.py" diff --git a/pyproject.toml b/pyproject.toml index 52350d19..ed62833c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,19 +5,19 @@ build-backend = "hatchling.build" [project] name = "jupyverse" description = "A set of FPS plugins implementing a Jupyter server" -keywords = ["jupyter", "server", "fastapi", "pluggy", "plugins"] +keywords = ["jupyter", "server", "fastapi", "plugins"] dynamic = ["version"] requires-python = ">=3.8" dependencies = [ - "fastapi>=0.87.0", - "fps>=0.0.21", - "fps-uvicorn>=0.0.19", - "fps-auth-base>=0.0.42", - "fps-contents>=0.0.42", - "fps-kernels>=0.0.42", - "fps-terminals>=0.0.42", - "fps-nbconvert>=0.0.42", - "fps-yjs>=0.0.42", + "asphalt >=4.11.0,<5", + "asphalt-web[fastapi] >=1.1.0,<2", + "fastapi >=0.94.0,<0.95.0", + "fps-contents", + "fps-kernels", + "fps-terminals", + "fps-nbconvert", + "fps-yjs", + "jupyverse-api", ] [[project.authors]] @@ -35,20 +35,31 @@ text = "BSD 3-Clause License" Homepage = "https://jupyter.org" [project.optional-dependencies] -jupyterlab = [ "fps-jupyterlab >=0.0.42",] -retrolab = [ "fps-retrolab >=0.0.42",] -auth = [ "fps-auth >=0.0.42",] -auth-fief = [ "fps-auth-fief >=0.0.42",] -noauth = ["fps-noauth >=0.0.42"] -test = [ "mypy", "types-setuptools", "pytest", "pytest-asyncio", "pytest-env", "requests", "websockets", "ipykernel",] +jupyterlab = [ "fps-jupyterlab",] 
+retrolab = [ "fps-retrolab",] +auth = [ "fps-auth",] +auth-fief = [ "fps-auth-fief",] +noauth = ["fps-noauth"] +test = [ + "mypy", + "types-setuptools", + "pytest", + "pytest-asyncio", + "pytest-env", + "httpx", + "httpx-ws", + "requests", + "websockets", + "ipykernel", +] docs = [ "mkdocs", "mkdocs-material",] [tool.hatch.envs.dev] -# TODO: if/when hatch gets support for defining editable dependencies, the +# TODO: if/when hatch gets support for defining editable dependencies, the # pre-install commands here and post-install commands in the matrix can be moved # to the dependencies section pre-install-commands = [ - "pip install -e ./plugins/auth_base", + "pip install -e ./jupyverse_api", "pip install -e ./plugins/contents", "pip install -e ./plugins/frontend", "pip install -e ./plugins/kernels", @@ -57,16 +68,15 @@ pre-install-commands = [ "pip install -e ./plugins/terminals", "pip install -e ./plugins/yjs", "pip install -e ./plugins/resource_usage", + "pip install -e ./plugins/auth", + "pip install -e ./plugins/auth_fief", + "pip install -e ./plugins/noauth", + "pip install -e ./plugins/login", ] dependencies = ["fastapi>=0.87.0"] features = ["test"] [tool.hatch.envs.dev.overrides] -matrix.auth.post-install-commands = [ - { value = "pip install -e ./plugins/noauth", if = ["noauth"] }, - { value = "pip install -e ./plugins/auth", if = ["auth"] }, - { value = "pip install -e ./plugins/auth_fief", if = ["auth_fief"] }, -] matrix.frontend.post-install-commands = [ { value = "pip install -e ./plugins/jupyterlab", if = ["jupyterlab"]}, { value = "pip install -e ./plugins/retrolab", if = ["retrolab"]}, @@ -74,7 +84,6 @@ matrix.frontend.post-install-commands = [ [[tool.hatch.envs.dev.matrix]] frontend = ["jupyterlab", "retrolab"] -auth = ["noauth", "auth", "auth_fief"] [tool.hatch.envs.dev.scripts] test = "pytest ./tests -v" @@ -84,7 +93,7 @@ lint = [ ] typecheck = [ "mypy jupyverse", - "mypy plugins/auth_base/fps_auth_base", + "mypy jupyverse_api", "mypy plugins/frontend/fps_frontend", "mypy plugins/contents/fps_contents", "mypy plugins/kernels/fps_kernels", @@ -92,6 +101,8 @@ typecheck = [ "mypy plugins/jupyterlab/fps_jupyterlab", "mypy plugins/lab/fps_lab", "mypy plugins/auth/fps_auth", + "mypy plugins/noauth/fps_noauth", + "mypy plugins/auth_fief/fps_auth_fief", "mypy plugins/nbconvert/fps_nbconvert", "mypy plugins/yjs/fps_yjs", "mypy plugins/resource_usage/fps_resource_usage", @@ -106,9 +117,6 @@ features = ["docs"] build = "mkdocs build --clean --strict" serve = "mkdocs serve --dev-addr localhost:8000" -[project.scripts] -jupyverse = "fps_uvicorn.cli:app" - [tool.check-manifest] ignore = [ ".*",] @@ -120,7 +128,7 @@ max-line-length = 100 [tool.jupyter-releaser.options] python_packages = [ - "plugins/auth_base:fps-auth-base", + "jupyverse_api:jupyverse-api", "plugins/noauth:fps-noauth", "plugins/auth:fps-auth", "plugins/auth_fief:fps-auth-fief", @@ -135,7 +143,7 @@ python_packages = [ "plugins/yjs:fps-yjs", "plugins/resource_usage:fps-resource-usage", "plugins/login:fps-login", - ".:jupyverse:fps-auth-base,fps-noauth,fps-auth,fps-auth-fief,fps-contents,fps-jupyterlab,fps-kernels,fps-lab,fps-frontend,fps-nbconvert,fps-retrolab,fps-terminals,fps-yjs,fps-resource-usage" + ".:jupyverse:jupyverse-api,fps-noauth,fps-auth,fps-auth-fief,fps-contents,fps-jupyterlab,fps-kernels,fps-lab,fps-frontend,fps-nbconvert,fps-retrolab,fps-terminals,fps-yjs,fps-resource-usage" ] [tool.hatch.version] diff --git a/tests/conftest.py b/tests/conftest.py index 15453e43..8d1eac8c 100644 --- a/tests/conftest.py +++ 
b/tests/conftest.py @@ -1,15 +1,10 @@ import os -import socket import subprocess import time from pathlib import Path import pytest - -pytest_plugins = ( - "fps.testing.fixtures", - "fps_auth.fixtures", -) +import requests @pytest.fixture() @@ -17,37 +12,29 @@ def cwd(): return Path(__file__).parents[1] -def get_open_port(): - s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - s.bind(("", 0)) - s.listen(1) - port = str(s.getsockname()[1]) - s.close() - return port - - @pytest.fixture() -def start_jupyverse(auth_mode, clear_users, cwd, capfd): +def start_jupyverse(auth_mode, clear_users, cwd, unused_tcp_port): os.chdir(cwd) - port = get_open_port() command_list = [ - "jupyverse", - "--no-open-browser", - f"--auth.mode={auth_mode}", - "--auth.clear_users=" + str(clear_users).lower(), - f"--port={port}", + "asphalt", + "run", + "config.yaml", + "--set", + f"component.components.auth.mode={auth_mode}", + "--set", + f"component.components.auth.clear_users={str(clear_users).lower()}", + "--set", + f"component.port={unused_tcp_port}", ] - print(" ".join(command_list)) p = subprocess.Popen(command_list) - dtime, ttime, timeout = 0.1, 0, 10 + url = f"http://127.0.0.1:{unused_tcp_port}" while True: - time.sleep(dtime) - ttime += dtime - if ttime >= timeout: - raise RuntimeError("Timeout while launching Jupyverse") - out, err = capfd.readouterr() - if "Application startup complete" in err: + try: + requests.get(url) + except requests.exceptions.ConnectionError: + time.sleep(0.1) + else: break - url = f"http://127.0.0.1:{port}" yield url p.kill() + p.wait() diff --git a/tests/test_auth.py b/tests/test_auth.py index e1af73a9..1eb33675 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -1,54 +1,127 @@ import pytest -from fps_auth.config import get_auth_config -from starlette.websockets import WebSocketDisconnect +from asphalt.core import Context +from asphalt.web.fastapi import FastAPIComponent +from fastapi import FastAPI +from jupyverse_api.auth import AuthConfig +from httpx import AsyncClient +from httpx_ws import WebSocketUpgradeError, aconnect_ws +from utils import authenticate_client, configure -def test_kernel_channels_unauthenticated(client): - with pytest.raises(WebSocketDisconnect): - with client.websocket_connect( - "/api/kernels/kernel_id_0/channels?session_id=session_id_0", + +COMPONENTS = { + "app": {"type": "app"}, + "auth": {"type": "auth", "test": True}, + "contents": {"type": "contents"}, + "frontend": {"type": "frontend"}, + "lab": {"type": "lab"}, + "jupyterlab": {"type": "jupyterlab"}, + "kernels": {"type": "kernels"}, + "yjs": {"type": "yjs"}, +} + + +@pytest.mark.asyncio +async def test_kernel_channels_unauthenticated(unused_tcp_port): + application = FastAPI() + + async with Context() as ctx: + await FastAPIComponent( + components=COMPONENTS, + app=application, + port=unused_tcp_port, + ).start(ctx) + + with pytest.raises(WebSocketUpgradeError): + async with aconnect_ws( + f"http://127.0.0.1:{unused_tcp_port}/api/kernels/kernel_id_0/channels?session_id=session_id_0", + ): + pass + + +@pytest.mark.asyncio +async def test_kernel_channels_authenticated(unused_tcp_port): + application = FastAPI() + + async with Context() as ctx, AsyncClient() as http: + await FastAPIComponent( + components=COMPONENTS, + app=application, + port=unused_tcp_port, + ).start(ctx) + + await authenticate_client(http, unused_tcp_port) + async with aconnect_ws( + f"http://127.0.0.1:{unused_tcp_port}/api/kernels/kernel_id_0/channels?session_id=session_id_0", + http, ): pass -def 
test_kernel_channels_authenticated(authenticated_client): - with authenticated_client.websocket_connect( - "/api/kernels/kernel_id_0/channels?session_id=session_id_0", - ): - pass +@pytest.mark.asyncio +@pytest.mark.parametrize("auth_mode", ("noauth", "token", "user")) +async def test_root_auth(auth_mode, unused_tcp_port): + components = configure(COMPONENTS, {"auth": {"mode": auth_mode}}) + application = FastAPI() + async with Context() as ctx, AsyncClient() as http: + await FastAPIComponent( + components=components, + app=application, + port=unused_tcp_port, + ).start(ctx) -@pytest.mark.parametrize("auth_mode", ("noauth", "token", "user")) -def test_root_auth(auth_mode, client): - response = client.get("/") - if auth_mode == "noauth": - expected = 200 - content_type = "text/html; charset=utf-8" - elif auth_mode in ["token", "user"]: - expected = 403 - content_type = "application/json" + response = await http.get(f"http://127.0.0.1:{unused_tcp_port}/") + if auth_mode == "noauth": + expected = 302 + elif auth_mode in ["token", "user"]: + expected = 403 - assert response.status_code == expected - assert response.headers["content-type"] == content_type + assert response.status_code == expected + assert response.headers["content-type"] == "application/json" +@pytest.mark.asyncio @pytest.mark.parametrize("auth_mode", ("noauth",)) -def test_no_auth(client): - response = client.get("/lab/api/settings") - assert response.status_code == 200 +async def test_no_auth(auth_mode, unused_tcp_port): + components = configure(COMPONENTS, {"auth": {"mode": auth_mode}}) + application = FastAPI() + async with Context() as ctx, AsyncClient() as http: + await FastAPIComponent( + components=components, + app=application, + port=unused_tcp_port, + ).start(ctx) + response = await http.get(f"http://127.0.0.1:{unused_tcp_port}/lab") + assert response.status_code == 200 + + +@pytest.mark.asyncio @pytest.mark.parametrize("auth_mode", ("token",)) -def test_token_auth(client): - # no token provided, should not work - response = client.get("/") - assert response.status_code == 403 - # token provided, should work - auth_config = get_auth_config() - response = client.get(f"/?token={auth_config.token}") - assert response.status_code == 200 +async def test_token_auth(auth_mode, unused_tcp_port): + components = configure(COMPONENTS, {"auth": {"mode": auth_mode}}) + application = FastAPI() + async with Context() as ctx, AsyncClient() as http: + await FastAPIComponent( + components=components, + app=application, + port=unused_tcp_port, + ).start(ctx) + + auth_config = await ctx.request_resource(AuthConfig) + + # no token provided, should not work + response = await http.get(f"http://127.0.0.1:{unused_tcp_port}/") + assert response.status_code == 403 + # token provided, should work + response = await http.get(f"http://127.0.0.1:{unused_tcp_port}/?token={auth_config.token}") + assert response.status_code == 302 + +@pytest.mark.asyncio @pytest.mark.parametrize("auth_mode", ("user",)) @pytest.mark.parametrize( "permissions", @@ -57,11 +130,22 @@ def test_token_auth(client): {"admin": ["read"], "foo": ["bar", "baz"]}, ), ) -def test_permissions(authenticated_client, permissions): - response = authenticated_client.get("/auth/user/me") - if "admin" in permissions.keys(): - # we have the permissions - assert response.status_code == 200 - else: - # we don't have the permissions - assert response.status_code == 403 +async def test_permissions(auth_mode, permissions, unused_tcp_port): + components = configure(COMPONENTS, {"auth": {"mode": 
auth_mode}}) + application = FastAPI() + + async with Context() as ctx, AsyncClient() as http: + await FastAPIComponent( + components=components, + app=application, + port=unused_tcp_port, + ).start(ctx) + + await authenticate_client(http, unused_tcp_port, permissions=permissions) + response = await http.get(f"http://127.0.0.1:{unused_tcp_port}/auth/user/me") + if "admin" in permissions.keys(): + # we have the permissions + assert response.status_code == 200 + else: + # we don't have the permissions + assert response.status_code == 403 diff --git a/tests/test_contents.py b/tests/test_contents.py index 9d9c5f5f..3ab991fd 100644 --- a/tests/test_contents.py +++ b/tests/test_contents.py @@ -2,11 +2,24 @@ from pathlib import Path import pytest -from utils import clear_content_values, create_content, sort_content_by_name +from asphalt.core import Context +from asphalt.web.fastapi import FastAPIComponent +from fastapi import FastAPI +from httpx import AsyncClient +from utils import configure, clear_content_values, create_content, sort_content_by_name +COMPONENTS = { + "app": {"type": "app"}, + "auth": {"type": "auth", "test": True}, + "contents": {"type": "contents"}, + "frontend": {"type": "frontend"}, +} + + +@pytest.mark.asyncio @pytest.mark.parametrize("auth_mode", ("noauth",)) -def test_tree(client, tmp_path): +async def test_tree(auth_mode, tmp_path, unused_tcp_port): prev_dir = os.getcwd() os.chdir(tmp_path) dname = Path(".") @@ -51,12 +64,25 @@ def test_tree(client, tmp_path): path=dname.as_posix(), format="json", ) - response = client.get("/api/contents", params={"content": 1}) - actual = response.json() - # ignore modification and creation times - clear_content_values(actual, keys=["created", "last_modified"]) - # ensure content names are ordered the same way - sort_content_by_name(actual) - sort_content_by_name(expected) - assert actual == expected - os.chdir(prev_dir) + + components = configure(COMPONENTS, {"auth": {"mode": auth_mode}}) + application = FastAPI() + + async with Context() as ctx, AsyncClient() as http: + await FastAPIComponent( + components=components, + app=application, + port=unused_tcp_port, + ).start(ctx) + + response = await http.get( + f"http://127.0.0.1:{unused_tcp_port}/api/contents", params={"content": 1} + ) + actual = response.json() + # ignore modification and creation times + clear_content_values(actual, keys=["created", "last_modified"]) + # ensure content names are ordered the same way + sort_content_by_name(actual) + sort_content_by_name(expected) + assert actual == expected + os.chdir(prev_dir) diff --git a/tests/test_kernels.py b/tests/test_kernels.py index eccc3164..5668fe20 100644 --- a/tests/test_kernels.py +++ b/tests/test_kernels.py @@ -5,13 +5,31 @@ import pytest from fps_kernels.kernel_server.server import KernelServer, kernels +from asphalt.core import Context +from asphalt.web.fastapi import FastAPIComponent +from fastapi import FastAPI +from httpx import AsyncClient +from httpx_ws import aconnect_ws + +from utils import configure os.environ["PYDEVD_DISABLE_FILE_VALIDATION"] = "1" +COMPONENTS = { + "app": {"type": "app"}, + "auth": {"type": "auth", "test": True}, + "contents": {"type": "contents"}, + "frontend": {"type": "frontend"}, + "lab": {"type": "lab"}, + "jupyterlab": {"type": "jupyterlab"}, + "kernels": {"type": "kernels"}, + "yjs": {"type": "yjs"}, +} + @pytest.mark.asyncio @pytest.mark.parametrize("auth_mode", ("noauth",)) -async def test_kernel_messages(client, capfd): +async def test_kernel_messages(auth_mode, capfd, 
unused_tcp_port): kernel_id = "kernel_id_0" kernel_name = "python3" kernelspec_path = ( @@ -33,46 +51,56 @@ async def test_kernel_messages(client, capfd): }, } - # block msg_type_0 - msg["header"]["msg_id"] = str(int(msg["header"]["msg_id"]) + 1) - kernel_server.block_messages("msg_type_0") - with client.websocket_connect( - f"/api/kernels/{kernel_id}/channels?session_id=session_id_0", - ) as websocket: - websocket.send_json(msg) - sleep(0.5) - out, err = capfd.readouterr() - assert not err + components = configure(COMPONENTS, {"auth": {"mode": auth_mode}}) + application = FastAPI() + + async with Context() as ctx, AsyncClient(): + await FastAPIComponent( + components=components, + app=application, + port=unused_tcp_port, + ).start(ctx) + + # block msg_type_0 + msg["header"]["msg_id"] = str(int(msg["header"]["msg_id"]) + 1) + kernel_server.block_messages("msg_type_0") + async with aconnect_ws( + f"http://127.0.0.1:{unused_tcp_port}/api/kernels/{kernel_id}/channels?session_id=session_id_0", + ) as websocket: + await websocket.send_json(msg) + sleep(0.5) + out, err = capfd.readouterr() + assert not err - # allow only msg_type_0 - msg["header"]["msg_id"] = str(int(msg["header"]["msg_id"]) + 1) - kernel_server.allow_messages("msg_type_0") - with client.websocket_connect( - f"/api/kernels/{kernel_id}/channels?session_id=session_id_0", - ) as websocket: - websocket.send_json(msg) - sleep(0.5) - out, err = capfd.readouterr() - assert err.count("[IPKernelApp] WARNING | Unknown message type: 'msg_type_0'") == 1 + # allow only msg_type_0 + msg["header"]["msg_id"] = str(int(msg["header"]["msg_id"]) + 1) + kernel_server.allow_messages("msg_type_0") + async with aconnect_ws( + f"http://127.0.0.1:{unused_tcp_port}/api/kernels/{kernel_id}/channels?session_id=session_id_0", + ) as websocket: + await websocket.send_json(msg) + sleep(0.5) + out, err = capfd.readouterr() + assert err.count("[IPKernelApp] WARNING | Unknown message type: 'msg_type_0'") == 1 - # block all messages - msg["header"]["msg_id"] = str(int(msg["header"]["msg_id"]) + 1) - kernel_server.allow_messages([]) - with client.websocket_connect( - f"/api/kernels/{kernel_id}/channels?session_id=session_id_0", - ) as websocket: - websocket.send_json(msg) - sleep(0.5) - out, err = capfd.readouterr() - assert not err + # block all messages + msg["header"]["msg_id"] = str(int(msg["header"]["msg_id"]) + 1) + kernel_server.allow_messages([]) + async with aconnect_ws( + f"http://127.0.0.1:{unused_tcp_port}/api/kernels/{kernel_id}/channels?session_id=session_id_0", + ) as websocket: + await websocket.send_json(msg) + sleep(0.5) + out, err = capfd.readouterr() + assert not err - # allow all messages - msg["header"]["msg_id"] = str(int(msg["header"]["msg_id"]) + 1) - kernel_server.allow_messages() - with client.websocket_connect( - f"/api/kernels/{kernel_id}/channels?session_id=session_id_0", - ) as websocket: - websocket.send_json(msg) - sleep(0.5) - out, err = capfd.readouterr() - assert err.count("[IPKernelApp] WARNING | Unknown message type: 'msg_type_0'") == 1 + # allow all messages + msg["header"]["msg_id"] = str(int(msg["header"]["msg_id"]) + 1) + kernel_server.allow_messages() + async with aconnect_ws( + f"http://127.0.0.1:{unused_tcp_port}/api/kernels/{kernel_id}/channels?session_id=session_id_0", + ) as websocket: + await websocket.send_json(msg) + sleep(0.5) + out, err = capfd.readouterr() + assert err.count("[IPKernelApp] WARNING | Unknown message type: 'msg_type_0'") == 1 diff --git a/tests/test_server.py b/tests/test_server.py index 
diff --git a/tests/test_server.py b/tests/test_server.py
index e2c1f0af..fbd09bf1 100644
--- a/tests/test_server.py
+++ b/tests/test_server.py
@@ -69,7 +69,7 @@ async def test_rest_api(start_jupyverse):
     kernel_id = r["kernel"]["id"]
     # get the room ID for the document
     response = requests.put(
-        f"{url}/api/yjs/roomid/{path}",
+        f"{url}/api/collaboration/session/{path}",
         data=json.dumps(
             {
                 "format": "json",
@@ -77,8 +77,9 @@
             }
         ),
     )
-    document_id = response.text
-    async with connect(f"{ws_url}/api/yjs/{document_id}") as websocket:
+    file_id = response.json()["fileId"]
+    document_id = f"json:notebook:{file_id}"
+    async with connect(f"{ws_url}/api/collaboration/room/{document_id}") as websocket:
         # connect to the shared notebook document
         ydoc = Y.YDoc()
         WebsocketProvider(ydoc, websocket)
@@ -95,10 +96,11 @@
                 }
             ),
         )
+        print(f"{url}/api/kernels/{kernel_id}/execute", response.json())
         # wait for Y model to be updated
         await asyncio.sleep(0.5)
         # retrieve cells
-        cells = ydoc.get_array("cells").to_json()
+        cells = json.loads(ydoc.get_array("cells").to_json())
         assert cells[0]["outputs"] == [
             {
                 "data": {"text/plain": ["3"]},
diff --git a/tests/test_settings.py b/tests/test_settings.py
index fd21f92c..e5af954e 100644
--- a/tests/test_settings.py
+++ b/tests/test_settings.py
@@ -1,29 +1,62 @@
 import json
 
 import pytest
+from asphalt.core import Context
+from asphalt.web.fastapi import FastAPIComponent
+from fastapi import FastAPI
+from httpx import AsyncClient
+
+from utils import configure
+
 
 test_theme = {"raw": '{// jupyverse test\n"theme": "JupyterLab Dark"}'}
 
+COMPONENTS = {
+    "app": {"type": "app"},
+    "auth": {"type": "auth", "test": True},
+    "contents": {"type": "contents"},
+    "frontend": {"type": "frontend"},
+    "lab": {"type": "lab"},
+    "jupyterlab": {"type": "jupyterlab"},
+    "kernels": {"type": "kernels"},
+    "yjs": {"type": "yjs"},
+}
+
 
+@pytest.mark.asyncio
 @pytest.mark.parametrize("auth_mode", ("noauth",))
-def test_settings(client):
-    # get previous theme
-    response = client.get("/lab/api/settings/@jupyterlab/apputils-extension:themes")
-    assert response.status_code == 200
-    theme = {"raw": json.loads(response.content)["raw"]}
-    # put new theme
-    response = client.put(
-        "/lab/api/settings/@jupyterlab/apputils-extension:themes",
-        data=json.dumps(test_theme),
-    )
-    assert response.status_code == 204
-    # get new theme
-    response = client.get("/lab/api/settings/@jupyterlab/apputils-extension:themes")
-    assert response.status_code == 200
-    assert json.loads(response.content)["raw"] == test_theme["raw"]
-    # put previous theme back
-    response = client.put(
-        "/lab/api/settings/@jupyterlab/apputils-extension:themes",
-        data=json.dumps(theme),
-    )
-    assert response.status_code == 204
+async def test_settings(auth_mode, unused_tcp_port):
+    components = configure(COMPONENTS, {"auth": {"mode": auth_mode}})
+    application = FastAPI()
+
+    async with Context() as ctx, AsyncClient() as http:
+        await FastAPIComponent(
+            components=components,
+            app=application,
+            port=unused_tcp_port,
+        ).start(ctx)
+
+        # get previous theme
+        response = await http.get(
+            f"http://127.0.0.1:{unused_tcp_port}/lab/api/settings/@jupyterlab/apputils-extension:themes"
+        )
+        assert response.status_code == 200
+        theme = {"raw": json.loads(response.content)["raw"]}
+        # put new theme
+        response = await http.put(
+            f"http://127.0.0.1:{unused_tcp_port}/lab/api/settings/@jupyterlab/apputils-extension:themes",
+            data=json.dumps(test_theme),
+        )
+        assert response.status_code == 204
+        # get new theme
+        response = await http.get(
+            f"http://127.0.0.1:{unused_tcp_port}/lab/api/settings/@jupyterlab/apputils-extension:themes"
+        )
+        assert response.status_code == 200
+        assert json.loads(response.content)["raw"] == test_theme["raw"]
+        # put previous theme back
+        response = await http.put(
+            f"http://127.0.0.1:{unused_tcp_port}/lab/api/settings/@jupyterlab/apputils-extension:themes",
+            data=json.dumps(theme),
+        )
+        assert response.status_code == 204
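The `configure` helper introduced in tests/utils.py below merges a single level of per-component overrides into a deep copy of a COMPONENTS mapping. A small usage sketch, with illustrative values only:

```python
# Illustrative only: what the configure() helper added below computes.
from utils import configure

COMPONENTS = {
    "app": {"type": "app"},
    "auth": {"type": "auth", "test": True},
}

components = configure(COMPONENTS, {"auth": {"mode": "noauth"}})
assert components["auth"] == {"type": "auth", "test": True, "mode": "noauth"}
# The input mapping is deep-copied, so it is left untouched.
assert COMPONENTS["auth"] == {"type": "auth", "test": True}
```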
diff --git a/tests/utils.py b/tests/utils.py
index 95ed689f..0eb71b3c 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -1,6 +1,73 @@
+from copy import deepcopy
+from uuid import uuid4
 from typing import Dict, List, Optional
 
 
+async def authenticate_client(http, port, permissions={}):
+    # create a new user
+    username = uuid4().hex
+    # if logged in, log out
+    first_time = True
+    while True:
+        response = await http.get(f"http://127.0.0.1:{port}/api/me")
+        if response.status_code == 403:
+            break
+        assert first_time
+        response = await http.post(f"http://127.0.0.1:{port}/auth/logout")
+        assert response.status_code == 200
+        first_time = False
+
+    # register user
+    register_body = {
+        "email": f"{username}@example.com",
+        "password": username,
+        "username": username,
+        "permissions": permissions,
+    }
+    response = await http.post(f"http://127.0.0.1:{port}/auth/register", json=register_body)
+    # check that we cannot register if not logged in
+    assert response.status_code == 403
+    # log in as admin
+    login_body = {"username": "admin@jupyter.com", "password": "jupyverse"}
+    response = await http.post(f"http://127.0.0.1:{port}/auth/login", data=login_body)
+    assert response.status_code == 200
+    # register user
+    response = await http.post(f"http://127.0.0.1:{port}/auth/register", json=register_body)
+    assert response.status_code == 201
+
+    # log out
+    response = await http.post(f"http://127.0.0.1:{port}/auth/logout")
+    assert response.status_code == 200
+    # check that we can't get our identity, since we're not logged in
+    response = await http.get(f"http://127.0.0.1:{port}/api/me")
+    assert response.status_code == 403
+
+    # log in with registered user
+    login_body = {"username": f"{username}@example.com", "password": username}
+    response = await http.post(f"http://127.0.0.1:{port}/auth/login", data=login_body)
+    assert response.status_code == 200
+    # we should now have a cookie
+    assert "fastapiusersauth" in http.cookies
+    # check our identity, since we're logged in
+    response = await http.get(
+        f"http://127.0.0.1:{port}/api/me", params={"permissions": permissions}
+    )
+    assert response.status_code == 200
+    me = response.json()
+    assert me["identity"]["username"] == username
+    # check our permissions
+    assert me["permissions"] == permissions
+
+
+def configure(components, config):
+    # TODO: generalize to arbitrary nested dictionaries, not just one level
+    _components = deepcopy(components)
+    for k1, v1 in config.items():
+        for k2, v2 in v1.items():
+            _components[k1][k2] = v2
+    return _components
+
+
 def create_content(
     content: Optional[List],
     type: str,