diff --git a/auth.py b/auth.py new file mode 100644 index 0000000..d23fc1c --- /dev/null +++ b/auth.py @@ -0,0 +1,241 @@ +"""Headscale WebUI authentication abstraction.""" + +import secrets +from functools import wraps +from typing import Awaitable, Callable, Literal, ParamSpec, TypeVar + +import requests +from flask import current_app +from flask.typing import ResponseReturnValue +from flask_basicauth import BasicAuth # type: ignore +from flask_oidc import OpenIDConnect # type: ignore +from pydantic import AnyHttpUrl, AnyUrl, BaseModel, Field + +from config import BasicAuthConfig, Config, OidcAuthConfig + + +class OidcSecretsModel(BaseModel): + """OIDC secrets model used by the flask_oidc module.""" + + class OidcWebModel(BaseModel): + """OIDC secrets web model.""" + + issuer: AnyHttpUrl + auth_uri: AnyHttpUrl + client_id: str + client_secret: str = Field(hidden=True) + redirect_uris: list[AnyUrl] + userinfo_uri: AnyHttpUrl | None + token_uri: AnyHttpUrl + + web: OidcWebModel + + +class OpenIdProviderMetadata(BaseModel): + """OIDC Provider Metadata model. + + From https://openid.net/specs/openid-connect-discovery-1_0.html#ProviderMetadata + + TODO: Add default factories for some fields and maybe descriptions. 
+ """ + + class Config: + """BaseModel configuration.""" + + extra = "allow" + """Used for logout_redirect_uri.""" + + issuer: AnyHttpUrl + authorization_endpoint: AnyHttpUrl + token_endpoint: AnyHttpUrl + userinfo_endpoint: AnyHttpUrl | None + jwks_uri: AnyHttpUrl + registration_endpoint: AnyHttpUrl | None + scopes_supported: list[str] + response_types_supported: list[ + Literal[ + "code", + "id_token", + "id_token token", + "code id_token", + "code token", + "code id_token token", + ] + ] + response_modes_supported: list[Literal["query", "fragment"]] | None + grant_types_supported: list[str] | None + acr_values_supported: list[str] | None + subject_types_supported: list[str] + id_token_signing_alg_values_supported: list[str] + id_token_encryption_alg_values_supported: list[str] | None + id_token_encryption_enc_values_supported: list[str] | None + userinfo_signing_alg_values_supported: list[str | None] | None + userinfo_encryption_alg_values_supported: list[str] | None + userinfo_encryption_enc_values_supported: list[str] | None + request_object_signing_alg_values_supported: list[str] | None + request_object_encryption_alg_values_supported: list[str] | None + request_object_encryption_enc_values_supported: list[str] | None + token_endpoint_auth_methods_supported: list[str] | None + token_endpoint_auth_signing_alg_values_supported: list[str] | None + display_values_supported: list[Literal["page", "popup", "touch", "wap"]] | None + claim_types_supported: list[Literal["normal", "aggregated", "distributed"]] | None + claims_supported: list[str] | None + service_documentation: AnyUrl | None + claims_locales_supported: list[str] | None + ui_locales_supported: list[str] | None + claims_parameter_supported: bool = Field(False) + request_parameter_supported: bool = Field(False) + request_uri_parameter_supported: bool = Field(True) + require_request_uri_registration: bool = Field(False) + op_policy_uri: AnyUrl | None + op_tos_uri: AnyUrl | None + + +T = TypeVar("T") +P = 
ParamSpec("P") + + +class AuthManager: + """Authentication manager.""" + + def __init__(self, config: Config, request_timeout: float = 10) -> None: + """Initialize the authentication manager. + + Arguments: + config -- main application configuration. + + Keyword Arguments: + request_timeout -- timeout for OIDC request (default: {10}) + """ + self._gui_url = config.domain_name + config.base_path + self._auth_type = config.auth_type + self._auth_config = config.auth_type.config + self._logout_url: str | None = None + self._request_timeout = request_timeout + + match self._auth_config: + case BasicAuthConfig(): + current_app.logger.info( + "Loading basic auth libraries and configuring app..." + ) + + current_app.config["BASIC_AUTH_USERNAME"] = self._auth_config.username + current_app.config["BASIC_AUTH_PASSWORD"] = self._auth_config.password + current_app.config["BASIC_AUTH_FORCE"] = True + + # TODO: Change for flask-httpauth – flask_basicauth is not maintained. + self._auth_handler = BasicAuth(current_app) + case OidcAuthConfig(): + current_app.logger.info("Loading OIDC libraries and configuring app...") + + oidc_info = OpenIdProviderMetadata.parse_obj( + requests.get( + self._auth_config.auth_url, timeout=request_timeout + ).json() + ) + current_app.logger.debug( + "JSON dump for OIDC_INFO: %s", oidc_info.json() + ) + + client_secrets = OidcSecretsModel( + web=OidcSecretsModel.OidcWebModel( + issuer=oidc_info.issuer, + auth_uri=oidc_info.authorization_endpoint, + client_id=self._auth_config.client_id, + client_secret=self._auth_config.secret, + redirect_uris=[ + AnyUrl( + f"{config.domain_name}{config.base_path}/oidc_callback", + scheme="", + ) + ], + userinfo_uri=oidc_info.userinfo_endpoint, + token_uri=oidc_info.token_endpoint, + ) + ) + + # Make the best effort to create the data directory. 
+ try: + config.app_data_dir.mkdir(parents=True, exist_ok=True) + except PermissionError: + current_app.logger.warning( + "Tried and failed to create data directory %s.", + config.app_data_dir, + ) + + oidc_secrets_path = config.app_data_dir / "secrets.json" + with open(oidc_secrets_path, "w+", encoding="utf-8") as secrets_file: + secrets_file.write(client_secrets.json()) + + current_app.config.update( # type: ignore + { + "SECRET_KEY": secrets.token_urlsafe(32), + "TESTING": config.debug_mode, + "DEBUG": config.debug_mode, + "OIDC_CLIENT_SECRETS": oidc_secrets_path, + "OIDC_ID_TOKEN_COOKIE_SECURE": True, + "OIDC_REQUIRE_VERIFIED_EMAIL": False, + "OIDC_USER_INFO_ENABLED": True, + "OIDC_OPENID_REALM": "Headscale-WebUI", + "OIDC_SCOPES": ["openid", "profile", "email"], + "OIDC_INTROSPECTION_AUTH_METHOD": "client_secret_post", + } + ) + + self._logout_url = getattr(oidc_info, "end_session_endpoint", None) + + self._auth_handler = OpenIDConnect(current_app) + + def require_login( + self, + func: Callable[P, ResponseReturnValue] + | Callable[P, Awaitable[ResponseReturnValue]], + ) -> Callable[P, ResponseReturnValue]: + """Guard decorator used for restricting access to the Flask page. + + Uses OIDC or Basic auth depending on configuration. + """ + + @wraps(func) + def wrapper(*args: P.args, **kwargs: P.kwargs) -> ResponseReturnValue: + sync_func = current_app.ensure_sync(func) # type: ignore + sync_func.__name__ = f"{func.__name__}" + + # OIDC + # TODO: Add user group restrictions. + if isinstance(self._auth_handler, OpenIDConnect): + return self._auth_handler.require_login(sync_func)( # type: ignore + *args, **kwargs + ) + + # Basic auth + return self._auth_handler.required(sync_func)( # type: ignore + *args, **kwargs + ) + + return wrapper + + def logout(self) -> str | None: + """Execute logout with the auth provider.""" + # Logout is only applicable for OIDC. 
+ if isinstance(self._auth_handler, OpenIDConnect): + self._auth_handler.logout() + + if isinstance(self._auth_config, OidcAuthConfig): + if self._logout_url is not None: + logout_url = self._logout_url + if self._auth_config.logout_redirect_uri is not None: + logout_url += ( + "?post_logout_redirect_uri=" + + self._auth_config.logout_redirect_uri + ) + return logout_url + + return None + + @property + def oidc_handler(self) -> OpenIDConnect | None: + """Get the OIDC handler if exists.""" + if isinstance(self._auth_handler, OpenIDConnect): + return self._auth_handler + return None diff --git a/config.py b/config.py new file mode 100644 index 0000000..f201316 --- /dev/null +++ b/config.py @@ -0,0 +1,527 @@ +"""Headscale WebUI configuration.""" + +import importlib.metadata +import itertools +import os +from dataclasses import dataclass +from datetime import datetime +from enum import StrEnum +from logging import getLevelNamesMapping +from pathlib import Path +from typing import Any, Type +from zoneinfo import ZoneInfo, ZoneInfoNotFoundError + +from aiohttp import ClientConnectionError +from flask import current_app +from pydantic import validator # type: ignore +from pydantic import ( + AnyUrl, + BaseModel, + BaseSettings, + ConstrainedStr, + Field, + ValidationError, +) + +import helper + + +class OidcAuthConfig(BaseSettings): + """OpenID Connect authentication configuration. + + Used only if "AUTH_TYPE" environment variable is set to "oidc". + """ + + auth_url: str = Field( + ..., + env="OIDC_AUTH_URL", + description=( + "URL to OIDC auth endpoint. 
Example: " + '"https://example.com/.well-known/openid-configuration"' + ), + ) + client_id: str = Field( + env="OIDC_CLIENT_ID", + description="OIDC client ID.", + ) + secret: str = Field( + env="OIDC_CLIENT_SECRET", + description="OIDC client secret.", + ) + logout_redirect_uri: str | None = Field( + None, + env="OIDC_LOGOUT_REDIRECT_URI", + description="Optional OIDC redirect URL to follow after logout.", + ) + + +class BasicAuthConfig(BaseSettings): + """Basic auth authentication configuration. + + Used only if "AUTH_TYPE" environment variable is set to "basic". + """ + + username: str = Field(env="BASIC_AUTH_USER", description="Username for basic auth.") + password: str = Field(env="BASIC_AUTH_PASS", description="Password for basic auth.") + + +class AuthType(StrEnum): + """Authentication type.""" + + BASIC = "basic" + OIDC = "oidc" + + @property + def config(self): + """Get configuration depending on enum value.""" + match self: + case self.BASIC: + return BasicAuthConfig() # type: ignore + case self.OIDC: + return OidcAuthConfig() # type: ignore + + +class _LowerConstr(ConstrainedStr): + """String with lowercase transformation.""" + + to_lower = True + + +@dataclass +class InitCheckErrorModel: + """Initialization check error model.""" + + title: str + details: str + + def print_to_logger(self): + """Print the error information to logger.""" + current_app.logger.critical(self.title) + + def format_message(self) -> str: + """Format message for the error page.""" + return helper.format_message( + helper.MessageErrorType.ERROR, self.title, f"

{self.details}

" + ) + + +@dataclass +class InitCheckError(RuntimeError): + """Initialization check error.""" + + errors: list[InitCheckErrorModel] | InitCheckErrorModel | None = None + + def append_error(self, error: InitCheckErrorModel): + """Append error to the errors collection.""" + match self.errors: + case InitCheckErrorModel(): + self.errors = [self.errors, error] + case list(): + self.errors.append(error) + case _: + self.errors = error + + def __iter__(self): # noqa + match self.errors: + case InitCheckErrorModel(): + yield self.errors + case list(): + for error in self.errors: + yield error + case _: + return + + @classmethod + def from_validation_error(cls, error: ValidationError): + """Create an InitCheckError from Pydantic's ValidationError.""" + current_app.logger.critical( + "Following environment variables are required but are not declared or have " + "an invalid value:" + ) + + new_error = cls() + for sub_pydantic_error in error.errors(): + pydantic_name = sub_pydantic_error["loc"][0] + assert isinstance( + pydantic_name, str + ), "Configuration class malformed. Raise issue on GitHub." + + model: Type[BaseModel] = error.model # type: ignore + field = model.__fields__[pydantic_name] + assert ( + "env" in field.field_info.extra + ), "Environment variable name not set. Raise issue on GitHub." + + current_app.logger.critical( + " %s with type %s: %s", + field.field_info.extra["env"], + field.type_.__name__, + sub_pydantic_error["type"], + ) + + new_error.append_error( + InitCheckErrorModel( + f"Environment error for {field.field_info.extra['env']}", + f"Required variable {field.field_info.extra['env']} with type " + f'"{field.type_.__name__}" validation error ' + f"({sub_pydantic_error['type']}): {sub_pydantic_error['msg']}. 
" + f"Variable description: {field.field_info.description}", + ) + ) + return new_error + + @classmethod + def from_client_connection_error(cls, error: ClientConnectionError): + """Create an InitCheckError from aiohttp's ClientConnectionError.""" + return InitCheckError( + InitCheckErrorModel( + "Headscale server API is unreachable.", + "Your headscale server is either unreachable or not properly " + "configured. Please ensure your configuration is correct. Error " + f"details: {error}", + ) + ) + + @classmethod + def from_exception(cls, error: Exception, print_to_logger: bool = True): + """Create an InitCheckError from any error. + + Some special cases are handled separately. + """ + if isinstance(error, InitCheckError): + new_error = error + elif isinstance(error, ValidationError): + new_error = cls.from_validation_error(error) + elif isinstance(error, ClientConnectionError): + new_error = cls.from_client_connection_error(error) + else: + new_error = cls( + InitCheckErrorModel( + f"Unexpected error occurred: {error.__class__.__name__}. Raise an " + "issue on GitHub.", + str(error), + ) + ) + if print_to_logger: + for sub_error in new_error: + sub_error.print_to_logger() + + return new_error + + +def _get_version_from_package(): + """Get package version from metadata if not given from environment.""" + return importlib.metadata.version("headscale-webui") + + +# Functions to get git-related information in development scenario, where no relevant +# environment variables are set. If not in git repository fall back to unknown values. +# GitPython is added as dev dependency, thus we need to have fallback in case of +# production environment. 
+try: + from git.exc import GitError + from git.repo import Repo + + def _get_default_git_branch() -> str: + try: + return Repo(search_parent_directories=True).head.ref.name + except GitError as error: + return f"Error getting branch name: {error}" + + def _get_default_git_commit() -> str: + try: + return Repo(search_parent_directories=True).head.ref.object.hexsha + except GitError as error: + return f"Error getting commit ID: {error}" + + def _get_default_git_repo_url_gitpython() -> str | None: + try: + return ( + Repo(search_parent_directories=True) + .remotes[0] + .url.replace("git@github.com:", "https://github.com/") + .removesuffix(".git") + ) + except (GitError, IndexError): + return None + +except ImportError: + + def _get_default_git_branch() -> str: + return "UNKNOWN" + + def _get_default_git_commit() -> str: + return "UNKNOWN" + + def _get_default_git_repo_url_gitpython() -> str | None: + return None + + +def _get_default_git_repo_url(): + gitpython = _get_default_git_repo_url_gitpython() + return ( + "https://github.com/iFargle/headscale-webui" if gitpython is None else gitpython + ) + + +class Config(BaseSettings): + """Headscale WebUI configuration. + + `env` arg means what is the environment variable called. + """ + + color: _LowerConstr = Field( + "red", + env="COLOR", + description=( + "Preferred color scheme. See the MaterializeCSS docs " + "(https://materializecss.github.io/materialize/color.html#palette) for " + 'examples. Only set the "base" color, e.g., instead of `blue-gray ' + "darken-1` use `blue-gray`." + ), + ) + auth_type: AuthType = Field( + AuthType.BASIC, + env="AUTH_TYPE", + description="Authentication type.", + ) + log_level_name: str = Field( + "INFO", + env="LOG_LEVEL", + description=( + 'Logger level. If "DEBUG", Flask debug mode is activated, so don\'t use it ' + "in production." 
+ ), + ) + debug_mode: bool = Field( + False, + env="DEBUG_MODE", + description="Enable Flask debug mode.", + ) + # TODO: Use user's locale to present datetime, not from server-side constant. + timezone: ZoneInfo = Field( + "UTC", + env="TZ", + description='Default time zone in IANA format. Example: "Asia/Tokyo".', + ) + key: str = Field( + env="KEY", + description=( + "Encryption key. Set this to a random value generated from " + "`openssl rand -base64 32`." + ), + ) + + app_version: str = Field( + default_factory=_get_version_from_package, + env="APP_VERSION", + description="Application version. Should be set by Docker.", + ) + build_date: datetime = Field( + default_factory=datetime.now, + env="BUILD_DATE", + description="Application build date. Should be set by Docker.", + ) + git_branch: str = Field( + default_factory=_get_default_git_branch, + env="GIT_BRANCH", + description="Application git branch. Should be set by Docker.", + ) + git_commit: str = Field( + default_factory=_get_default_git_commit, + env="GIT_COMMIT", + description="Application git commit. Should be set by Docker.", + ) + git_repo_url: AnyUrl = Field( + default_factory=_get_default_git_repo_url, + env="GIT_REPO_URL", + description=( + "Application git repository URL. " + "Set automatically either to local or default repository." + ), + ) + + # TODO: Autogenerate in headscale_api. + hs_version: str = Field( + "UNKNOWN", + env="HS_VERSION", + description=( + "Version of Headscale this is compatible with. Should be set by Docker." + ), + ) + hs_server: AnyUrl = Field( + "http://localhost:5000", + env="HS_SERVER", + description="The URL of your Headscale control server.", + ) + hs_config_path: Path = Field( + None, + env="HS_CONFIG_PATH", + description=( + "Path to the Headscale configuration. Default paths are tried if not set." 
+ ), + ) + + domain_name: AnyUrl = Field( + "http://localhost:5000", + env="DOMAIN_NAME", + description="Base domain name of the Headscale WebUI.", + ) + base_path: str = Field( + "", + env="SCRIPT_NAME", + description=( + 'The "Base Path" for hosting. For example, if you want to host on ' + "http://example.com/admin, set this to `/admin`, otherwise remove this " + "variable entirely." + ), + ) + + app_data_dir: Path = Field( + Path("/data"), + env="APP_DATA_DIR", + description="Application data path.", + ) + + @validator("log_level_name") + @classmethod + def validate_log_level_name(cls, value: Any): + """Validate log_level_name field. + + Check if matches allowed log level from logging Python module. + """ + assert isinstance(value, str) + value = value.upper() + allowed_levels = getLevelNamesMapping() + if value not in allowed_levels: + raise ValueError( + f'Unknown log level "{value}". Select from: ' + + ", ".join(allowed_levels.keys()) + ) + return value + + @validator("timezone", pre=True) + @classmethod + def validate_timezone(cls, value: Any): + """Validate and parse timezone information.""" + try: + return ZoneInfo(value) + except ZoneInfoNotFoundError as error: + raise ValueError(f"Timezone {value} is invalid: {error}") from error + + @validator("hs_config_path", pre=True) + @classmethod + def validate_hs_config_path(cls, value: Any): + """Validate Headscale configuration path. + + If none is given, some default paths that Headscale itself is using for lookup + are searched. 
+ """ + if value is None: + search_base = ["/etc/headscale", Path.home() / ".headscale"] + suffixes = ["yml", "yaml", "json"] + else: + assert isinstance(value, (str, Path)) + search_base = [value] + suffixes = [""] + + for base, suffix in itertools.product(search_base, suffixes): + cur_path = f"{base}/config.{suffix}" + if os.access(cur_path, os.R_OK): + return cur_path + + raise InitCheckError( + InitCheckErrorModel( + "Headscale configuration read failed.", + "Please ensure your headscale configuration file resides in " + '/etc/headscale or in ~/.headscale and is named "config.yaml", ' + '"config.yml" or "config.json".', + ) + ) + + @validator("base_path") + @classmethod + def validate_base_path(cls, value: Any): + """Validate base path.""" + assert isinstance(value, str) + if value == "/": + return "" + return value + + @validator("app_data_dir") + @classmethod + def validate_app_data_dir(cls, value: Path): + """Validate application data format and basic filesystem access.""" + err = InitCheckError() + + if not os.access(value, os.R_OK): + err.append_error( + InitCheckErrorModel( + f"Data ({value}) folder not readable.", + f'"{value}" is not readable. Please ensure your permissions are ' + "correct. Data should be readable by UID/GID 1000:1000.", + ) + ) + + if not os.access(value, os.W_OK): + err.append_error( + InitCheckErrorModel( + f"Data ({value}) folder not writable.", + f'"{value}" is not writable. Please ensure your permissions are ' + "correct. Data should be writable by UID/GID 1000:1000.", + ) + ) + + if not os.access(value, os.X_OK): + err.append_error( + InitCheckErrorModel( + f"Data ({value}) folder not executable.", + f'"{value}" is not executable. Please ensure your permissions are ' + "correct. 
Data should be executable by UID/GID 1000:1000.", + ) + ) + + key_file = value / "key.txt" + if key_file.exists(): + if not os.access(key_file, os.R_OK): + err.append_error( + InitCheckErrorModel( + f"Key file ({key_file}) not readable.", + f'"{key_file}" is not readable. Please ensure your permissions ' + "are correct. It should be readable by UID/GID 1000:1000.", + ) + ) + + if not os.access(key_file, os.W_OK): + err.append_error( + InitCheckErrorModel( + f"Key file ({key_file}) not writable.", + f'"{key_file}" is not writable. Please ensure your permissions ' + "are correct. It should be writable by UID/GID 1000:1000.", + ) + ) + + if err.errors is not None: + raise err + + return value + + @property + def log_level(self) -> int: + """Get integer log level.""" + return getLevelNamesMapping()[self.log_level_name] + + @property + def color_nav(self): + """Get navigation color.""" + return f"{self.color} darken-1" + + @property + def color_btn(self): + """Get button color.""" + return f"{self.color} darken-3" + + @property + def key_file(self): + """Get key file path.""" + return self.app_data_dir / "key.txt" diff --git a/headscale.py b/headscale.py index 866a37a..50dd6c7 100644 --- a/headscale.py +++ b/headscale.py @@ -1,488 +1,129 @@ -import json -import logging -import os -from datetime import date, timedelta +"""Headscale API abstraction.""" + +from functools import wraps +from typing import Awaitable, Callable, ParamSpec, TypeVar -import requests -import yaml from cryptography.fernet import Fernet -from dateutil import parser -from flask import Flask +from flask import current_app, redirect, url_for +from flask.typing import ResponseReturnValue +from headscale_api.config import HeadscaleConfig as HeadscaleConfigBase +from headscale_api.headscale import Headscale, UnauthorizedError +from pydantic import ValidationError -LOG_LEVEL = os.environ["LOG_LEVEL"].replace('"', "").upper() -# Initiate the Flask application and logging: -app = Flask(__name__, 
static_url_path="/static") -match LOG_LEVEL: - case "DEBUG": - app.logger.setLevel(logging.DEBUG) - case "INFO": - app.logger.setLevel(logging.INFO) - case "WARNING": - app.logger.setLevel(logging.WARNING) - case "ERROR": - app.logger.setLevel(logging.ERROR) - case "CRITICAL": - app.logger.setLevel(logging.CRITICAL) +from config import Config + +T = TypeVar("T") +P = ParamSpec("P") -################################################################## -# Functions related to HEADSCALE and API KEYS -################################################################## -def get_url(inpage=False): - if not inpage: - return os.environ["HS_SERVER"] - config_file = "" - try: - config_file = open("/etc/headscale/config.yml", "r") - app.logger.info("Opening /etc/headscale/config.yml") - except: - config_file = open("/etc/headscale/config.yaml", "r") - app.logger.info("Opening /etc/headscale/config.yaml") - config_yaml = yaml.safe_load(config_file) - if "server_url" in config_yaml: - return str(config_yaml["server_url"]) - app.logger.warning( - "Failed to find server_url in the config. Falling back to ENV variable" - ) - return os.environ["HS_SERVER"] +class HeadscaleApi(Headscale): + """Headscale API abstraction.""" + def __init__(self, config: Config, requests_timeout: float = 10): + """Initialize the Headscale API abstraction. -def set_api_key(api_key): - # User-set encryption key - encryption_key = os.environ["KEY"] - # Key file on the filesystem for persistent storage - key_file = open("/data/key.txt", "wb+") - # Preparing the Fernet class with the key - fernet = Fernet(encryption_key) - # Encrypting the key - encrypted_key = fernet.encrypt(api_key.encode()) - # Return true if the file wrote correctly - return True if key_file.write(encrypted_key) else False + Arguments: + config -- Headscale WebUI configuration. 
- -def get_api_key(): - if not os.path.exists("/data/key.txt"): - return False - # User-set encryption key - encryption_key = os.environ["KEY"] - # Key file on the filesystem for persistent storage - key_file = open("/data/key.txt", "rb+") - # The encrypted key read from the file - enc_api_key = key_file.read() - if enc_api_key == b"": - return "NULL" - - # Preparing the Fernet class with the key - fernet = Fernet(encryption_key) - # Decrypting the key - decrypted_key = fernet.decrypt(enc_api_key).decode() - - return decrypted_key - - -def test_api_key(url, api_key): - response = requests.get( - str(url) + "/api/v1/apikey", - headers={ - "Accept": "application/json", - "Authorization": "Bearer " + str(api_key), - }, - ) - return response.status_code - - -# Expires an API key -def expire_key(url, api_key): - payload = {"prefix": str(api_key[0:10])} - json_payload = json.dumps(payload) - app.logger.debug( - "Sending the payload '" + str(json_payload) + "' to the headscale server" - ) - - response = requests.post( - str(url) + "/api/v1/apikey/expire", - data=json_payload, - headers={ - "Accept": "application/json", - "Content-Type": "application/json", - "Authorization": "Bearer " + str(api_key), - }, - ) - return response.status_code - - -# Checks if the key needs to be renewed -# If it does, renews the key, then expires the old key -def renew_api_key(url, api_key): - # 0 = Key has been updated or key is not in need of an update - # 1 = Key has failed validity check or has failed to write the API key - # Check when the key expires and compare it to todays date: - key_info = get_api_key_info(url, api_key) - expiration_time = key_info["expiration"] - today_date = date.today() - expire = parser.parse(expiration_time) - expire_fmt = ( - str(expire.year) - + "-" - + str(expire.month).zfill(2) - + "-" - + str(expire.day).zfill(2) - ) - expire_date = date.fromisoformat(expire_fmt) - delta = expire_date - today_date - tmp = today_date + timedelta(days=90) - 
new_expiration_date = str(tmp) + "T00:00:00.000000Z" - - # If the delta is less than 5 days, renew the key: - if delta < timedelta(days=5): - app.logger.warning("Key is about to expire. Delta is " + str(delta)) - payload = {"expiration": str(new_expiration_date)} - json_payload = json.dumps(payload) - app.logger.debug( - "Sending the payload '" + str(json_payload) + "' to the headscale server" + Keyword Arguments: + requests_timeout -- timeout of API requests in seconds (default: {10}) + """ + self._config = config + self._hs_config: HeadscaleConfigBase | None = None + self._api_key: str | None = None + self.logger = current_app.logger + super().__init__( + self.base_url, + self.api_key, + requests_timeout, + raise_exception_on_error=False, + logger=current_app.logger, ) - response = requests.post( - str(url) + "/api/v1/apikey", - data=json_payload, - headers={ - "Accept": "application/json", - "Content-Type": "application/json", - "Authorization": "Bearer " + str(api_key), - }, - ) - new_key = response.json() - app.logger.debug("JSON: " + json.dumps(new_key)) - app.logger.debug("New Key is: " + new_key["apiKey"]) - api_key_test = test_api_key(url, new_key["apiKey"]) - app.logger.debug("Testing the key: " + str(api_key_test)) - # Test if the new key works: - if api_key_test == 200: - app.logger.info("The new key is valid and we are writing it to the file") - if not set_api_key(new_key["apiKey"]): - app.logger.error("We failed writing the new key!") - return False # Key write failed - app.logger.info("Key validated and written. 
Moving to expire the key.") - expire_key(url, api_key) - return True # Key updated and validated - else: - app.logger.error("Testing the API key failed.") - return False # The API Key test failed - else: - return True # No work is required + @property + def app_config(self) -> Config: + """Get Headscale WebUI configuration.""" + return self._config + @property + def hs_config(self) -> HeadscaleConfigBase | None: + """Get Headscale configuration and cache on success. -# Gets information about the current API key -def get_api_key_info(url, api_key): - app.logger.info("Getting API key information") - response = requests.get( - str(url) + "/api/v1/apikey", - headers={ - "Accept": "application/json", - "Authorization": "Bearer " + str(api_key), - }, - ) - json_response = response.json() - # Find the current key in the array: - key_prefix = str(api_key[0:10]) - app.logger.info("Looking for valid API Key...") - for key in json_response["apiKeys"]: - if key_prefix == key["prefix"]: - app.logger.info("Key found.") - return key - app.logger.error("Could not find a valid key in Headscale. Need a new API key.") - return "Key not found" + Returns: + Headscale configuration if a valid configuration has been found. + """ + if self._hs_config is not None: + return self._hs_config + try: + return HeadscaleConfigBase.parse_file(self._config.hs_config_path) + except ValidationError as error: + self.logger.warning( + "Following errors happened when tried to parse Headscale config:" + ) + for sub_error in str(error).splitlines(): + self.logger.warning(" %s", sub_error) + return None -################################################################## -# Functions related to MACHINES -################################################################## + @property + def base_url(self) -> str: + """Get base URL of the Headscale server. + Tries to load it from Headscale config, otherwise falls back to WebUI config. 
+ """ + if self.hs_config is None or self.hs_config.server_url is None: + self.logger.warning( + 'Failed to find "server_url" in the Headscale config. Falling back to ' + "the environment variable." + ) + return self._config.hs_server -# register a new machine -def register_machine(url, api_key, machine_key, user): - app.logger.info("Registering machine %s to user %s", str(machine_key), str(user)) - response = requests.post( - str(url) - + "/api/v1/machine/register?user=" - + str(user) - + "&key=" - + str(machine_key), - headers={ - "Accept": "application/json", - "Authorization": "Bearer " + str(api_key), - }, - ) - return response.json() + return self.hs_config.server_url + @property + def api_key(self) -> str | None: + """Get API key from cache or from file.""" + if self._api_key is not None: + return self._api_key -# Sets the machines tags -def set_machine_tags(url, api_key, machine_id, tags_list): - app.logger.info("Setting machine_id %s tag %s", str(machine_id), str(tags_list)) - response = requests.post( - str(url) + "/api/v1/machine/" + str(machine_id) + "/tags", - data=tags_list, - headers={ - "Accept": "application/json", - "Content-Type": "application/json", - "Authorization": "Bearer " + str(api_key), - }, - ) - return response.json() + if not self._config.key_file.exists(): + return None + with open(self._config.key_file, "rb") as key_file: + enc_api_key = key_file.read() + if enc_api_key == b"": + return None -# Moves machine_id to user "new_user" -def move_user(url, api_key, machine_id, new_user): - app.logger.info("Moving machine_id %s to user %s", str(machine_id), str(new_user)) - response = requests.post( - str(url) + "/api/v1/machine/" + str(machine_id) + "/user?user=" + str(new_user), - headers={ - "Accept": "application/json", - "Authorization": "Bearer " + str(api_key), - }, - ) - return response.json() + self._api_key = Fernet(self._config.key).decrypt(enc_api_key).decode() + return self._api_key + @api_key.setter + def api_key(self, 
new_api_key: str): + """Write the new API key to file and store in cache.""" + with open(self._config.key_file, "wb") as key_file: + key_file.write(Fernet(self._config.key).encrypt(new_api_key.encode())) -def update_route(url, api_key, route_id, current_state): - action = "disable" if current_state == "True" else "enable" + # Save to local cache only after successful file write. + self._api_key = new_api_key - app.logger.info("Updating Route %s: Action: %s", str(route_id), str(action)) + def key_check_guard( + self, func: Callable[P, T] | Callable[P, Awaitable[T]] + ) -> Callable[P, T | ResponseReturnValue]: + """Ensure the validity of a Headscale API key with decorator. - # Debug - app.logger.debug("URL: " + str(url)) - app.logger.debug("Route ID: " + str(route_id)) - app.logger.debug("Current State: " + str(current_state)) - app.logger.debug("Action to take: " + str(action)) + Also, it checks if the key needs renewal and if it is invalid redirects to the + settings page. + """ - response = requests.post( - str(url) + "/api/v1/routes/" + str(route_id) + "/" + str(action), - headers={ - "Accept": "application/json", - "Authorization": "Bearer " + str(api_key), - }, - ) - return response.json() + @wraps(func) + def decorated(*args: P.args, **kwargs: P.kwargs) -> T | ResponseReturnValue: + try: + return current_app.ensure_sync(func)(*args, **kwargs) # type: ignore + except UnauthorizedError: + current_app.logger.warning( + "Detected unauthorized error from Headscale API. " + "Redirecting to settings." 
+ ) + return redirect(url_for("settings_page")) - -# Get all machines on the Headscale network -def get_machines(url, api_key): - app.logger.info("Getting machine information") - response = requests.get( - str(url) + "/api/v1/machine", - headers={ - "Accept": "application/json", - "Authorization": "Bearer " + str(api_key), - }, - ) - return response.json() - - -# Get machine with "machine_id" on the Headscale network -def get_machine_info(url, api_key, machine_id): - app.logger.info("Getting information for machine ID %s", str(machine_id)) - response = requests.get( - str(url) + "/api/v1/machine/" + str(machine_id), - headers={ - "Accept": "application/json", - "Authorization": "Bearer " + str(api_key), - }, - ) - return response.json() - - -# Delete a machine from Headscale -def delete_machine(url, api_key, machine_id): - app.logger.info("Deleting machine %s", str(machine_id)) - response = requests.delete( - str(url) + "/api/v1/machine/" + str(machine_id), - headers={ - "Accept": "application/json", - "Authorization": "Bearer " + str(api_key), - }, - ) - status = "True" if response.status_code == 200 else "False" - if response.status_code == 200: - app.logger.info("Machine deleted.") - else: - app.logger.error("Deleting machine failed! %s", str(response.json())) - return {"status": status, "body": response.json()} - - -# Rename "machine_id" with name "new_name" -def rename_machine(url, api_key, machine_id, new_name): - app.logger.info("Renaming machine %s", str(machine_id)) - response = requests.post( - str(url) + "/api/v1/machine/" + str(machine_id) + "/rename/" + str(new_name), - headers={ - "Accept": "application/json", - "Authorization": "Bearer " + str(api_key), - }, - ) - status = "True" if response.status_code == 200 else "False" - if response.status_code == 200: - app.logger.info("Machine renamed") - else: - app.logger.error("Machine rename failed! 
%s", str(response.json())) - return {"status": status, "body": response.json()} - - -# Gets routes for the passed machine_id -def get_machine_routes(url, api_key, machine_id): - app.logger.info("Getting routes for machine %s", str(machine_id)) - response = requests.get( - str(url) + "/api/v1/machine/" + str(machine_id) + "/routes", - headers={ - "Accept": "application/json", - "Authorization": "Bearer " + str(api_key), - }, - ) - if response.status_code == 200: - app.logger.info("Routes obtained") - else: - app.logger.error("Failed to get routes: %s", str(response.json())) - return response.json() - - -# Gets routes for the entire tailnet -def get_routes(url, api_key): - app.logger.info("Getting routes") - response = requests.get( - str(url) + "/api/v1/routes", - headers={ - "Accept": "application/json", - "Authorization": "Bearer " + str(api_key), - }, - ) - return response.json() - - -################################################################## -# Functions related to USERS -################################################################## - - -# Get all users in use -def get_users(url, api_key): - app.logger.info("Getting Users") - response = requests.get( - str(url) + "/api/v1/user", - headers={ - "Accept": "application/json", - "Authorization": "Bearer " + str(api_key), - }, - ) - return response.json() - - -# Rename "old_name" with name "new_name" -def rename_user(url, api_key, old_name, new_name): - app.logger.info("Renaming user %s to %s.", str(old_name), str(new_name)) - response = requests.post( - str(url) + "/api/v1/user/" + str(old_name) + "/rename/" + str(new_name), - headers={ - "Accept": "application/json", - "Authorization": "Bearer " + str(api_key), - }, - ) - status = "True" if response.status_code == 200 else "False" - if response.status_code == 200: - app.logger.info("User renamed.") - else: - app.logger.error("Renaming User failed!") - return {"status": status, "body": response.json()} - - -# Delete a user from Headscale -def 
delete_user(url, api_key, user_name): - app.logger.info("Deleting a User: %s", str(user_name)) - response = requests.delete( - str(url) + "/api/v1/user/" + str(user_name), - headers={ - "Accept": "application/json", - "Authorization": "Bearer " + str(api_key), - }, - ) - status = "True" if response.status_code == 200 else "False" - if response.status_code == 200: - app.logger.info("User deleted.") - else: - app.logger.error("Deleting User failed!") - return {"status": status, "body": response.json()} - - -# Add a user from Headscale -def add_user(url, api_key, data): - app.logger.info("Adding user: %s", str(data)) - response = requests.post( - str(url) + "/api/v1/user", - data=data, - headers={ - "Accept": "application/json", - "Content-Type": "application/json", - "Authorization": "Bearer " + str(api_key), - }, - ) - status = "True" if response.status_code == 200 else "False" - if response.status_code == 200: - app.logger.info("User added.") - else: - app.logger.error("Adding User failed!") - return {"status": status, "body": response.json()} - - -################################################################## -# Functions related to PREAUTH KEYS in USERS -################################################################## - - -# Get all PreAuth keys associated with a user "user_name" -def get_preauth_keys(url, api_key, user_name): - app.logger.info("Getting PreAuth Keys in User %s", str(user_name)) - response = requests.get( - str(url) + "/api/v1/preauthkey?user=" + str(user_name), - headers={ - "Accept": "application/json", - "Authorization": "Bearer " + str(api_key), - }, - ) - return response.json() - - -# Add a preauth key to the user "user_name" given the booleans "ephemeral" -# and "reusable" with the expiration date "date" contained in the JSON payload "data" -def add_preauth_key(url, api_key, data): - app.logger.info("Adding PreAuth Key: %s", str(data)) - response = requests.post( - str(url) + "/api/v1/preauthkey", - data=data, - headers={ - "Accept": 
"application/json", - "Content-Type": "application/json", - "Authorization": "Bearer " + str(api_key), - }, - ) - status = "True" if response.status_code == 200 else "False" - if response.status_code == 200: - app.logger.info("PreAuth Key added.") - else: - app.logger.error("Adding PreAuth Key failed!") - return {"status": status, "body": response.json()} - - -# Expire a pre-auth key. data is {"user": "string", "key": "string"} -def expire_preauth_key(url, api_key, data): - app.logger.info("Expiring PreAuth Key...") - response = requests.post( - str(url) + "/api/v1/preauthkey/expire", - data=data, - headers={ - "Accept": "application/json", - "Content-Type": "application/json", - "Authorization": "Bearer " + str(api_key), - }, - ) - status = "True" if response.status_code == 200 else "False" - app.logger.debug("expire_preauth_key - Return: " + str(response.json())) - app.logger.debug("expire_preauth_key - Status: " + str(status)) - return {"status": status, "body": response.json()} + return decorated diff --git a/helper.py b/helper.py index 398f42e..b048997 100644 --- a/helper.py +++ b/helper.py @@ -1,79 +1,48 @@ -import logging -import os +"""Helper functions used for formatting.""" -import requests -from flask import Flask - -import headscale - -LOG_LEVEL = os.environ["LOG_LEVEL"].replace('"', "").upper() -# Initiate the Flask application and logging: -app = Flask(__name__, static_url_path="/static") -match LOG_LEVEL: - case "DEBUG": - app.logger.setLevel(logging.DEBUG) - case "INFO": - app.logger.setLevel(logging.INFO) - case "WARNING": - app.logger.setLevel(logging.WARNING) - case "ERROR": - app.logger.setLevel(logging.ERROR) - case "CRITICAL": - app.logger.setLevel(logging.CRITICAL) +from datetime import timedelta +from enum import StrEnum +from typing import Literal -def pretty_print_duration(duration, delta_type=""): - """Prints a duration in human-readable formats""" +def pretty_print_duration( + duration: timedelta, delta_type: Literal["expiry", ""] = "" 
+): # pylint: disable=too-many-return-statements + """Print a duration in human-readable format.""" days, seconds = duration.days, duration.seconds hours = days * 24 + seconds // 3600 mins = (seconds % 3600) // 60 secs = seconds % 60 if delta_type == "expiry": if days > 730: - return "in greater than two years" + return "in more than two years" if days > 365: - return "in greater than a year" + return "in more than a year" if days > 0: - return ( - "in " + str(days) + " days" if days > 1 else "in " + str(days) + " day" - ) + return f"in {days} days" if days > 1 else f"in {days} day" if hours > 0: - return ( - "in " + str(hours) + " hours" - if hours > 1 - else "in " + str(hours) + " hour" - ) + return f"in {hours} hours" if hours > 1 else f"in {hours} hour" if mins > 0: - return ( - "in " + str(mins) + " minutes" - if mins > 1 - else "in " + str(mins) + " minute" - ) - return ( - "in " + str(secs) + " seconds" - if secs >= 1 or secs == 0 - else "in " + str(secs) + " second" - ) + return f"in {mins} minutes" if mins > 1 else f"in {mins} minute" + return f"in {secs} seconds" if secs >= 1 or secs == 0 else f"in {secs} second" + if days > 730: return "over two years ago" if days > 365: return "over a year ago" if days > 0: - return str(days) + " days ago" if days > 1 else str(days) + " day ago" + return f"{days} days ago" if days > 1 else f"{days} day ago" if hours > 0: - return str(hours) + " hours ago" if hours > 1 else str(hours) + " hour ago" + return f"{hours} hours ago" if hours > 1 else f"{hours} hour ago" if mins > 0: - return str(mins) + " minutes ago" if mins > 1 else str(mins) + " minute ago" - return ( - str(secs) + " seconds ago" - if secs >= 1 or secs == 0 - else str(secs) + " second ago" - ) + return f"{mins} minutes ago" if mins > 1 else f"{mins} minute ago" + return f"{secs} seconds ago" if secs >= 1 or secs == 0 else f"{secs} second ago" -def text_color_duration(duration): - """Prints a color based on duratioin (imported as seconds)""" - +def 
text_color_duration( + duration: timedelta, +): # pylint: disable=too-many-return-statements + """Print a color based on duration (imported as seconds).""" days, seconds = duration.days, duration.seconds hours = days * 24 + seconds // 3600 mins = (seconds % 3600) // 60 @@ -101,280 +70,83 @@ def text_color_duration(duration): return "green-text " -def key_check(): - """Checks the validity of a Headsclae API key and renews it if it's nearing expiration""" - api_key = headscale.get_api_key() - url = headscale.get_url() - - # Test the API key. If the test fails, return a failure. - # AKA, if headscale returns Unauthorized, fail: - app.logger.info("Testing API key validity.") - status = headscale.test_api_key(url, api_key) - if status != 200: - app.logger.info( - "Got a non-200 response from Headscale. Test failed (Response: %i)", - status, - ) - return False - else: - app.logger.info("Key check passed.") - # Check if the key needs to be renewed - headscale.renew_api_key(url, api_key) - return True - - -def get_color(import_id, item_type=""): - """Sets colors for users/namespaces""" +def get_color(import_id: int, item_type: Literal["failover", "text", ""] = ""): + """Get color for users/namespaces.""" # Define the colors... 
Seems like a good number to start with - if item_type == "failover": - colors = [ - "teal lighten-1", - "blue lighten-1", - "blue-grey lighten-1", - "indigo lighten-2", - "brown lighten-1", - "grey lighten-1", - "indigo lighten-2", - "deep-orange lighten-1", - "yellow lighten-2", - "purple lighten-2", - ] - index = import_id % len(colors) - return colors[index] - if item_type == "text": - colors = [ - "red-text text-lighten-1", - "teal-text text-lighten-1", - "blue-text text-lighten-1", - "blue-grey-text text-lighten-1", - "indigo-text text-lighten-2", - "green-text text-lighten-1", - "deep-orange-text text-lighten-1", - "yellow-text text-lighten-2", - "purple-text text-lighten-2", - "indigo-text text-lighten-2", - "brown-text text-lighten-1", - "grey-text text-lighten-1", - ] - index = import_id % len(colors) - return colors[index] - colors = [ - "red lighten-1", - "teal lighten-1", - "blue lighten-1", - "blue-grey lighten-1", - "indigo lighten-2", - "green lighten-1", - "deep-orange lighten-1", - "yellow lighten-2", - "purple lighten-2", - "indigo lighten-2", - "brown lighten-1", - "grey lighten-1", - ] - index = import_id % len(colors) - return colors[index] + match item_type: + case "failover": + colors = [ + "teal lighten-1", + "blue lighten-1", + "blue-grey lighten-1", + "indigo lighten-2", + "brown lighten-1", + "grey lighten-1", + "indigo lighten-2", + "deep-orange lighten-1", + "yellow lighten-2", + "purple lighten-2", + ] + case "text": + colors = [ + "red-text text-lighten-1", + "teal-text text-lighten-1", + "blue-text text-lighten-1", + "blue-grey-text text-lighten-1", + "indigo-text text-lighten-2", + "green-text text-lighten-1", + "deep-orange-text text-lighten-1", + "yellow-text text-lighten-2", + "purple-text text-lighten-2", + "indigo-text text-lighten-2", + "brown-text text-lighten-1", + "grey-text text-lighten-1", + ] + case _: + colors = [ + "red lighten-1", + "teal lighten-1", + "blue lighten-1", + "blue-grey lighten-1", + "indigo lighten-2", + 
"green lighten-1", + "deep-orange lighten-1", + "yellow lighten-2", + "purple lighten-2", + "indigo lighten-2", + "brown lighten-1", + "grey lighten-1", + ] + return colors[import_id % len(colors)] -def format_message(error_type, title, message): - """Defines a generic 'collection' as error/warning/info messages""" - content = """ - - """ - ) + +def format_message(error_type: MessageErrorType, title: str, message: str): + """Render a "collection" as error/warning/info message.""" + content = '" return content - - -def access_checks(): - """Checks various items before each page load to ensure permissions are correct""" - url = headscale.get_url() - - # Return an error message if things fail. - # Return a formatted error message for EACH fail. - checks_passed = True # Default to true. Set to false when any checks fail. - data_readable = False # Checks R permissions of /data - data_writable = False # Checks W permissions of /data - data_executable = False # Execute on directories allows file access - file_readable = False # Checks R permissions of /data/key.txt - file_writable = False # Checks W permissions of /data/key.txt - file_exists = False # Checks if /data/key.txt exists - config_readable = False # Checks if the headscale configuration file is readable - - # Check 1: Check: the Headscale server is reachable: - server_reachable = False - response = requests.get(str(url) + "/health") - if response.status_code == 200: - server_reachable = True - else: - checks_passed = False - app.logger.critical("Headscale URL: Response 200: FAILED") - - # Check: /data is rwx for 1000:1000: - if os.access("/data/", os.R_OK): - data_readable = True - else: - app.logger.critical("/data READ: FAILED") - checks_passed = False - if os.access("/data/", os.W_OK): - data_writable = True - else: - app.logger.critical("/data WRITE: FAILED") - checks_passed = False - if os.access("/data/", os.X_OK): - data_executable = True - else: - app.logger.critical("/data EXEC: FAILED") - checks_passed 
= False - - # Check: /data/key.txt exists and is rw: - if os.access("/data/key.txt", os.F_OK): - file_exists = True - if os.access("/data/key.txt", os.R_OK): - file_readable = True - else: - app.logger.critical("/data/key.txt READ: FAILED") - checks_passed = False - if os.access("/data/key.txt", os.W_OK): - file_writable = True - else: - app.logger.critical("/data/key.txt WRITE: FAILED") - checks_passed = False - else: - app.logger.error("/data/key.txt EXIST: FAILED - NO ERROR") - - # Check: /etc/headscale/config.yaml is readable: - if os.access("/etc/headscale/config.yaml", os.R_OK): - config_readable = True - elif os.access("/etc/headscale/config.yml", os.R_OK): - config_readable = True - else: - app.logger.error("/etc/headscale/config.y(a)ml: READ: FAILED") - checks_passed = False - - if checks_passed: - app.logger.info("All startup checks passed.") - return "Pass" - - message_html = "" - # Generate the message: - if not server_reachable: - app.logger.critical("Server is unreachable") - message = ( - """ -

Your headscale server is either unreachable or not properly configured. - Please ensure your configuration is correct (Check for 200 status on - """ - + url - + """/api/v1 failed. Response: """ - + str(response.status_code) - + """.)

- """ - ) - - message_html += format_message("Error", "Headscale unreachable", message) - - if not config_readable: - app.logger.critical("Headscale configuration is not readable") - message = """ -

/etc/headscale/config.yaml not readable. Please ensure your - headscale configuration file resides in /etc/headscale and - is named "config.yaml" or "config.yml"

- """ - - message_html += format_message( - "Error", "/etc/headscale/config.yaml not readable", message - ) - - if not data_writable: - app.logger.critical("/data folder is not writable") - message = """ -

/data is not writable. Please ensure your - permissions are correct. /data mount should be writable - by UID/GID 1000:1000.

- """ - - message_html += format_message("Error", "/data not writable", message) - - if not data_readable: - app.logger.critical("/data folder is not readable") - message = """ -

/data is not readable. Please ensure your - permissions are correct. /data mount should be readable - by UID/GID 1000:1000.

- """ - - message_html += format_message("Error", "/data not readable", message) - - if not data_executable: - app.logger.critical("/data folder is not readable") - message = """ -

/data is not executable. Please ensure your - permissions are correct. /data mount should be readable - by UID/GID 1000:1000. (chown 1000:1000 /path/to/data && chmod -R 755 /path/to/data)

- """ - - message_html += format_message("Error", "/data not executable", message) - - if file_exists: - # If it doesn't exist, we assume the user hasn't created it yet. - # Just redirect to the settings page to enter an API Key - if not file_writable: - app.logger.critical("/data/key.txt is not writable") - message = """ -

/data/key.txt is not writable. Please ensure your - permissions are correct. /data mount should be writable - by UID/GID 1000:1000.

- """ - - message_html += format_message( - "Error", "/data/key.txt not writable", message - ) - - if not file_readable: - app.logger.critical("/data/key.txt is not readable") - message = """ -

/data/key.txt is not readable. Please ensure your - permissions are correct. /data mount should be readable - by UID/GID 1000:1000.

- """ - - message_html += format_message( - "Error", "/data/key.txt not readable", message - ) - - return message_html - - -def load_checks(): - """Bundles all the checks into a single function to call easier""" - # General error checks. See the function for more info: - if access_checks() != "Pass": - return "error_page" - # If the API key fails, redirect to the settings page: - if not key_check(): - return "settings_page" - return "Pass" diff --git a/poetry.lock b/poetry.lock index 706db36..99f7650 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,26 +1,252 @@ # This file is automatically @generated by Poetry 1.4.2 and should not be changed by hand. +[[package]] +name = "aiohttp" +version = "3.8.4" +description = "Async http client/server framework (asyncio)" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:5ce45967538fb747370308d3145aa68a074bdecb4f3a300869590f725ced69c1"}, + {file = "aiohttp-3.8.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b744c33b6f14ca26b7544e8d8aadff6b765a80ad6164fb1a430bbadd593dfb1a"}, + {file = "aiohttp-3.8.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a45865451439eb320784918617ba54b7a377e3501fb70402ab84d38c2cd891b"}, + {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a86d42d7cba1cec432d47ab13b6637bee393a10f664c425ea7b305d1301ca1a3"}, + {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee3c36df21b5714d49fc4580247947aa64bcbe2939d1b77b4c8dcb8f6c9faecc"}, + {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:176a64b24c0935869d5bbc4c96e82f89f643bcdf08ec947701b9dbb3c956b7dd"}, + {file = "aiohttp-3.8.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c844fd628851c0bc309f3c801b3a3d58ce430b2ce5b359cd918a5a76d0b20cb5"}, + {file = 
"aiohttp-3.8.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5393fb786a9e23e4799fec788e7e735de18052f83682ce2dfcabaf1c00c2c08e"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e4b09863aae0dc965c3ef36500d891a3ff495a2ea9ae9171e4519963c12ceefd"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:adfbc22e87365a6e564c804c58fc44ff7727deea782d175c33602737b7feadb6"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:147ae376f14b55f4f3c2b118b95be50a369b89b38a971e80a17c3fd623f280c9"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:eafb3e874816ebe2a92f5e155f17260034c8c341dad1df25672fb710627c6949"}, + {file = "aiohttp-3.8.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6cc15d58053c76eacac5fa9152d7d84b8d67b3fde92709195cb984cfb3475ea"}, + {file = "aiohttp-3.8.4-cp310-cp310-win32.whl", hash = "sha256:59f029a5f6e2d679296db7bee982bb3d20c088e52a2977e3175faf31d6fb75d1"}, + {file = "aiohttp-3.8.4-cp310-cp310-win_amd64.whl", hash = "sha256:fe7ba4a51f33ab275515f66b0a236bcde4fb5561498fe8f898d4e549b2e4509f"}, + {file = "aiohttp-3.8.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d8ef1a630519a26d6760bc695842579cb09e373c5f227a21b67dc3eb16cfea4"}, + {file = "aiohttp-3.8.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b3f2e06a512e94722886c0827bee9807c86a9f698fac6b3aee841fab49bbfb4"}, + {file = "aiohttp-3.8.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3a80464982d41b1fbfe3154e440ba4904b71c1a53e9cd584098cd41efdb188ef"}, + {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b631e26df63e52f7cce0cce6507b7a7f1bc9b0c501fcde69742130b32e8782f"}, + {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f43255086fe25e36fd5ed8f2ee47477408a73ef00e804cb2b5cba4bf2ac7f5e"}, + {file = 
"aiohttp-3.8.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d347a172f866cd1d93126d9b239fcbe682acb39b48ee0873c73c933dd23bd0f"}, + {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a3fec6a4cb5551721cdd70473eb009d90935b4063acc5f40905d40ecfea23e05"}, + {file = "aiohttp-3.8.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80a37fe8f7c1e6ce8f2d9c411676e4bc633a8462844e38f46156d07a7d401654"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d1e6a862b76f34395a985b3cd39a0d949ca80a70b6ebdea37d3ab39ceea6698a"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cd468460eefef601ece4428d3cf4562459157c0f6523db89365202c31b6daebb"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:618c901dd3aad4ace71dfa0f5e82e88b46ef57e3239fc7027773cb6d4ed53531"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:652b1bff4f15f6287550b4670546a2947f2a4575b6c6dff7760eafb22eacbf0b"}, + {file = "aiohttp-3.8.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80575ba9377c5171407a06d0196b2310b679dc752d02a1fcaa2bc20b235dbf24"}, + {file = "aiohttp-3.8.4-cp311-cp311-win32.whl", hash = "sha256:bbcf1a76cf6f6dacf2c7f4d2ebd411438c275faa1dc0c68e46eb84eebd05dd7d"}, + {file = "aiohttp-3.8.4-cp311-cp311-win_amd64.whl", hash = "sha256:6e74dd54f7239fcffe07913ff8b964e28b712f09846e20de78676ce2a3dc0bfc"}, + {file = "aiohttp-3.8.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:880e15bb6dad90549b43f796b391cfffd7af373f4646784795e20d92606b7a51"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bb96fa6b56bb536c42d6a4a87dfca570ff8e52de2d63cabebfd6fb67049c34b6"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:4a6cadebe132e90cefa77e45f2d2f1a4b2ce5c6b1bfc1656c1ddafcfe4ba8131"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f352b62b45dff37b55ddd7b9c0c8672c4dd2eb9c0f9c11d395075a84e2c40f75"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ab43061a0c81198d88f39aaf90dae9a7744620978f7ef3e3708339b8ed2ef01"}, + {file = "aiohttp-3.8.4-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c9cb1565a7ad52e096a6988e2ee0397f72fe056dadf75d17fa6b5aebaea05622"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:1b3ea7edd2d24538959c1c1abf97c744d879d4e541d38305f9bd7d9b10c9ec41"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:7c7837fe8037e96b6dd5cfcf47263c1620a9d332a87ec06a6ca4564e56bd0f36"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:3b90467ebc3d9fa5b0f9b6489dfb2c304a1db7b9946fa92aa76a831b9d587e99"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:cab9401de3ea52b4b4c6971db5fb5c999bd4260898af972bf23de1c6b5dd9d71"}, + {file = "aiohttp-3.8.4-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:d1f9282c5f2b5e241034a009779e7b2a1aa045f667ff521e7948ea9b56e0c5ff"}, + {file = "aiohttp-3.8.4-cp36-cp36m-win32.whl", hash = "sha256:5e14f25765a578a0a634d5f0cd1e2c3f53964553a00347998dfdf96b8137f777"}, + {file = "aiohttp-3.8.4-cp36-cp36m-win_amd64.whl", hash = "sha256:4c745b109057e7e5f1848c689ee4fb3a016c8d4d92da52b312f8a509f83aa05e"}, + {file = "aiohttp-3.8.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:aede4df4eeb926c8fa70de46c340a1bc2c6079e1c40ccf7b0eae1313ffd33519"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ddaae3f3d32fc2cb4c53fab020b69a05c8ab1f02e0e59665c6f7a0d3a5be54f"}, + {file = 
"aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4eb3b82ca349cf6fadcdc7abcc8b3a50ab74a62e9113ab7a8ebc268aad35bb9"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9bcb89336efa095ea21b30f9e686763f2be4478f1b0a616969551982c4ee4c3b"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c08e8ed6fa3d477e501ec9db169bfac8140e830aa372d77e4a43084d8dd91ab"}, + {file = "aiohttp-3.8.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c6cd05ea06daca6ad6a4ca3ba7fe7dc5b5de063ff4daec6170ec0f9979f6c332"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7a00a9ed8d6e725b55ef98b1b35c88013245f35f68b1b12c5cd4100dddac333"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:de04b491d0e5007ee1b63a309956eaed959a49f5bb4e84b26c8f5d49de140fa9"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:40653609b3bf50611356e6b6554e3a331f6879fa7116f3959b20e3528783e699"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:dbf3a08a06b3f433013c143ebd72c15cac33d2914b8ea4bea7ac2c23578815d6"}, + {file = "aiohttp-3.8.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:854f422ac44af92bfe172d8e73229c270dc09b96535e8a548f99c84f82dde241"}, + {file = "aiohttp-3.8.4-cp37-cp37m-win32.whl", hash = "sha256:aeb29c84bb53a84b1a81c6c09d24cf33bb8432cc5c39979021cc0f98c1292a1a"}, + {file = "aiohttp-3.8.4-cp37-cp37m-win_amd64.whl", hash = "sha256:db3fc6120bce9f446d13b1b834ea5b15341ca9ff3f335e4a951a6ead31105480"}, + {file = "aiohttp-3.8.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:fabb87dd8850ef0f7fe2b366d44b77d7e6fa2ea87861ab3844da99291e81e60f"}, + {file = "aiohttp-3.8.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:91f6d540163f90bbaef9387e65f18f73ffd7c79f5225ac3d3f61df7b0d01ad15"}, + {file = 
"aiohttp-3.8.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:d265f09a75a79a788237d7f9054f929ced2e69eb0bb79de3798c468d8a90f945"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d89efa095ca7d442a6d0cbc755f9e08190ba40069b235c9886a8763b03785da"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4dac314662f4e2aa5009977b652d9b8db7121b46c38f2073bfeed9f4049732cd"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe11310ae1e4cd560035598c3f29d86cef39a83d244c7466f95c27ae04850f10"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ddb2a2026c3f6a68c3998a6c47ab6795e4127315d2e35a09997da21865757f8"}, + {file = "aiohttp-3.8.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e75b89ac3bd27d2d043b234aa7b734c38ba1b0e43f07787130a0ecac1e12228a"}, + {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6e601588f2b502c93c30cd5a45bfc665faaf37bbe835b7cfd461753068232074"}, + {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a5d794d1ae64e7753e405ba58e08fcfa73e3fad93ef9b7e31112ef3c9a0efb52"}, + {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:a1f4689c9a1462f3df0a1f7e797791cd6b124ddbee2b570d34e7f38ade0e2c71"}, + {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:3032dcb1c35bc330134a5b8a5d4f68c1a87252dfc6e1262c65a7e30e62298275"}, + {file = "aiohttp-3.8.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8189c56eb0ddbb95bfadb8f60ea1b22fcfa659396ea36f6adcc521213cd7b44d"}, + {file = "aiohttp-3.8.4-cp38-cp38-win32.whl", hash = "sha256:33587f26dcee66efb2fff3c177547bd0449ab7edf1b73a7f5dea1e38609a0c54"}, + {file = "aiohttp-3.8.4-cp38-cp38-win_amd64.whl", hash = "sha256:e595432ac259af2d4630008bf638873d69346372d38255774c0e286951e8b79f"}, + {file = 
"aiohttp-3.8.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5a7bdf9e57126dc345b683c3632e8ba317c31d2a41acd5800c10640387d193ed"}, + {file = "aiohttp-3.8.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:22f6eab15b6db242499a16de87939a342f5a950ad0abaf1532038e2ce7d31567"}, + {file = "aiohttp-3.8.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7235604476a76ef249bd64cb8274ed24ccf6995c4a8b51a237005ee7a57e8643"}, + {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea9eb976ffdd79d0e893869cfe179a8f60f152d42cb64622fca418cd9b18dc2a"}, + {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92c0cea74a2a81c4c76b62ea1cac163ecb20fb3ba3a75c909b9fa71b4ad493cf"}, + {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:493f5bc2f8307286b7799c6d899d388bbaa7dfa6c4caf4f97ef7521b9cb13719"}, + {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a63f03189a6fa7c900226e3ef5ba4d3bd047e18f445e69adbd65af433add5a2"}, + {file = "aiohttp-3.8.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:10c8cefcff98fd9168cdd86c4da8b84baaa90bf2da2269c6161984e6737bf23e"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bca5f24726e2919de94f047739d0a4fc01372801a3672708260546aa2601bf57"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:03baa76b730e4e15a45f81dfe29a8d910314143414e528737f8589ec60cf7391"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8c29c77cc57e40f84acef9bfb904373a4e89a4e8b74e71aa8075c021ec9078c2"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:03543dcf98a6619254b409be2d22b51f21ec66272be4ebda7b04e6412e4b2e14"}, + {file = "aiohttp-3.8.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:17b79c2963db82086229012cff93ea55196ed31f6493bb1ccd2c62f1724324e4"}, + {file = "aiohttp-3.8.4-cp39-cp39-win32.whl", hash = "sha256:34ce9f93a4a68d1272d26030655dd1b58ff727b3ed2a33d80ec433561b03d67a"}, + {file = "aiohttp-3.8.4-cp39-cp39-win_amd64.whl", hash = "sha256:41a86a69bb63bb2fc3dc9ad5ea9f10f1c9c8e282b471931be0268ddd09430b04"}, + {file = "aiohttp-3.8.4.tar.gz", hash = "sha256:bf2e1a9162c1e441bf805a1fd166e249d574ca04e03b34f97e2928769e91ab5c"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = ">=4.0.0a3,<5.0" +attrs = ">=17.3.0" +charset-normalizer = ">=2.0,<4.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "cchardet"] + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "apscheduler" +version = "3.10.1" +description = "In-process task scheduler with Cron-like capabilities" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "APScheduler-3.10.1-py3-none-any.whl", hash = "sha256:e813ad5ada7aff36fb08cdda746b520531eaac7757832abc204868ba78e0c8f6"}, + {file = "APScheduler-3.10.1.tar.gz", hash = "sha256:0293937d8f6051a0f493359440c1a1b93e882c57daf0197afeff0e727777b96e"}, +] + +[package.dependencies] +pytz = "*" +setuptools = ">=0.7" +six = ">=1.4.0" +tzlocal = ">=2.0,<3.0.0 || >=4.0.0" + +[package.extras] +doc = ["sphinx", "sphinx-rtd-theme"] +gevent = ["gevent"] +mongodb = ["pymongo (>=3.0)"] +redis = ["redis (>=3.0)"] +rethinkdb = ["rethinkdb (>=2.4.0)"] +sqlalchemy = 
["sqlalchemy (>=1.4)"] +testing = ["pytest", "pytest-asyncio", "pytest-cov", "pytest-tornado5"] +tornado = ["tornado (>=4.3)"] +twisted = ["twisted"] +zookeeper = ["kazoo"] + +[[package]] +name = "asgiref" +version = "3.6.0" +description = "ASGI specs, helper code, and adapters" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "asgiref-3.6.0-py3-none-any.whl", hash = "sha256:71e68008da809b957b7ee4b43dbccff33d1b23519fb8344e33f049897077afac"}, + {file = "asgiref-3.6.0.tar.gz", hash = "sha256:9567dfe7bd8d3c8c892227827c41cce860b368104c3431da67a0c5a65a949506"}, +] + +[package.extras] +tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] + [[package]] name = "astroid" -version = "2.15.2" +version = "2.15.3" description = "An abstract syntax tree for Python with inference support." category = "dev" optional = false python-versions = ">=3.7.2" files = [ - {file = "astroid-2.15.2-py3-none-any.whl", hash = "sha256:dea89d9f99f491c66ac9c04ebddf91e4acf8bd711722175fe6245c0725cc19bb"}, - {file = "astroid-2.15.2.tar.gz", hash = "sha256:6e61b85c891ec53b07471aec5878f4ac6446a41e590ede0f2ce095f39f7d49dd"}, + {file = "astroid-2.15.3-py3-none-any.whl", hash = "sha256:f11e74658da0f2a14a8d19776a8647900870a63de71db83713a8e77a6af52662"}, + {file = "astroid-2.15.3.tar.gz", hash = "sha256:44224ad27c54d770233751315fa7f74c46fa3ee0fab7beef1065f99f09897efe"}, ] [package.dependencies] lazy-object-proxy = ">=1.4.0" wrapt = {version = ">=1.14,<2", markers = "python_version >= \"3.11\""} +[[package]] +name = "async-timeout" +version = "4.0.2" +description = "Timeout context manager for asyncio programs" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "async-timeout-4.0.2.tar.gz", hash = "sha256:2163e1640ddb52b7a8c80d0a67a08587e5d245cc9c553a74a847056bc2976b15"}, + {file = "async_timeout-4.0.2-py3-none-any.whl", hash = "sha256:8ca1e4fcf50d07413d66d1a5e416e42cfdf5851c981d679a09851a6853383b3c"}, +] + +[[package]] +name = "attrs" 
+version = "23.1.0" +description = "Classes Without Boilerplate" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] + +[[package]] +name = "betterproto" +version = "2.0.0b5" +description = "A better Protobuf / gRPC generator & library" +category = "main" +optional = false +python-versions = "^3.7" +files = [] +develop = false + +[package.dependencies] +black = {version = ">=19.3b0", optional = true} +grpclib = "^0.4.1" +isort = {version = "^5.11.5", optional = true} +jinja2 = {version = ">=3.0.3", optional = true} +python-dateutil = "^2.8" + +[package.extras] +compiler = ["black (>=19.3b0)", "isort (>=5.11.5,<6.0.0)", "jinja2 (>=3.0.3)"] + +[package.source] +type = "git" +url = "https://github.com/MarekPikula/python-betterproto.git" +reference = "classmethod_from_dict" +resolved_reference = "d7929e9b302697d28cf661f9182f80d201facb18" + [[package]] name = "black" version = "23.3.0" description = "The uncompromising code formatter." 
-category = "dev" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -277,6 +503,70 @@ files = [ {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +[[package]] +name = "coverage" +version = "7.2.3" +description = "Code coverage measurement for Python" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "coverage-7.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e58c0d41d336569d63d1b113bd573db8363bc4146f39444125b7f8060e4e04f5"}, + {file = "coverage-7.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:344e714bd0fe921fc72d97404ebbdbf9127bac0ca1ff66d7b79efc143cf7c0c4"}, + {file = "coverage-7.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:974bc90d6f6c1e59ceb1516ab00cf1cdfbb2e555795d49fa9571d611f449bcb2"}, + {file = "coverage-7.2.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0743b0035d4b0e32bc1df5de70fba3059662ace5b9a2a86a9f894cfe66569013"}, + {file = "coverage-7.2.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d0391fb4cfc171ce40437f67eb050a340fdbd0f9f49d6353a387f1b7f9dd4fa"}, + {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4a42e1eff0ca9a7cb7dc9ecda41dfc7cbc17cb1d02117214be0561bd1134772b"}, + {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:be19931a8dcbe6ab464f3339966856996b12a00f9fe53f346ab3be872d03e257"}, + {file = "coverage-7.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:72fcae5bcac3333a4cf3b8f34eec99cea1187acd55af723bcbd559adfdcb5535"}, + {file = "coverage-7.2.3-cp310-cp310-win32.whl", hash = "sha256:aeae2aa38395b18106e552833f2a50c27ea0000122bde421c31d11ed7e6f9c91"}, + {file = "coverage-7.2.3-cp310-cp310-win_amd64.whl", hash = 
"sha256:83957d349838a636e768251c7e9979e899a569794b44c3728eaebd11d848e58e"}, + {file = "coverage-7.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:dfd393094cd82ceb9b40df4c77976015a314b267d498268a076e940fe7be6b79"}, + {file = "coverage-7.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:182eb9ac3f2b4874a1f41b78b87db20b66da6b9cdc32737fbbf4fea0c35b23fc"}, + {file = "coverage-7.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bb1e77a9a311346294621be905ea8a2c30d3ad371fc15bb72e98bfcfae532df"}, + {file = "coverage-7.2.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca0f34363e2634deffd390a0fef1aa99168ae9ed2af01af4a1f5865e362f8623"}, + {file = "coverage-7.2.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55416d7385774285b6e2a5feca0af9652f7f444a4fa3d29d8ab052fafef9d00d"}, + {file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:06ddd9c0249a0546997fdda5a30fbcb40f23926df0a874a60a8a185bc3a87d93"}, + {file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:fff5aaa6becf2c6a1699ae6a39e2e6fb0672c2d42eca8eb0cafa91cf2e9bd312"}, + {file = "coverage-7.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:ea53151d87c52e98133eb8ac78f1206498c015849662ca8dc246255265d9c3c4"}, + {file = "coverage-7.2.3-cp311-cp311-win32.whl", hash = "sha256:8f6c930fd70d91ddee53194e93029e3ef2aabe26725aa3c2753df057e296b925"}, + {file = "coverage-7.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:fa546d66639d69aa967bf08156eb8c9d0cd6f6de84be9e8c9819f52ad499c910"}, + {file = "coverage-7.2.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b2317d5ed777bf5a033e83d4f1389fd4ef045763141d8f10eb09a7035cee774c"}, + {file = "coverage-7.2.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:be9824c1c874b73b96288c6d3de793bf7f3a597770205068c6163ea1f326e8b9"}, + {file = 
"coverage-7.2.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c3b2803e730dc2797a017335827e9da6da0e84c745ce0f552e66400abdfb9a1"}, + {file = "coverage-7.2.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f69770f5ca1994cb32c38965e95f57504d3aea96b6c024624fdd5bb1aa494a1"}, + {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1127b16220f7bfb3f1049ed4a62d26d81970a723544e8252db0efde853268e21"}, + {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:aa784405f0c640940595fa0f14064d8e84aff0b0f762fa18393e2760a2cf5841"}, + {file = "coverage-7.2.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3146b8e16fa60427e03884301bf8209221f5761ac754ee6b267642a2fd354c48"}, + {file = "coverage-7.2.3-cp37-cp37m-win32.whl", hash = "sha256:1fd78b911aea9cec3b7e1e2622c8018d51c0d2bbcf8faaf53c2497eb114911c1"}, + {file = "coverage-7.2.3-cp37-cp37m-win_amd64.whl", hash = "sha256:0f3736a5d34e091b0a611964c6262fd68ca4363df56185902528f0b75dbb9c1f"}, + {file = "coverage-7.2.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:981b4df72c93e3bc04478153df516d385317628bd9c10be699c93c26ddcca8ab"}, + {file = "coverage-7.2.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c0045f8f23a5fb30b2eb3b8a83664d8dc4fb58faddf8155d7109166adb9f2040"}, + {file = "coverage-7.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f760073fcf8f3d6933178d67754f4f2d4e924e321f4bb0dcef0424ca0215eba1"}, + {file = "coverage-7.2.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c86bd45d1659b1ae3d0ba1909326b03598affbc9ed71520e0ff8c31a993ad911"}, + {file = "coverage-7.2.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:172db976ae6327ed4728e2507daf8a4de73c7cc89796483e0a9198fd2e47b462"}, + {file = 
"coverage-7.2.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:d2a3a6146fe9319926e1d477842ca2a63fe99af5ae690b1f5c11e6af074a6b5c"}, + {file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:f649dd53833b495c3ebd04d6eec58479454a1784987af8afb77540d6c1767abd"}, + {file = "coverage-7.2.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:7c4ed4e9f3b123aa403ab424430b426a1992e6f4c8fd3cb56ea520446e04d152"}, + {file = "coverage-7.2.3-cp38-cp38-win32.whl", hash = "sha256:eb0edc3ce9760d2f21637766c3aa04822030e7451981ce569a1b3456b7053f22"}, + {file = "coverage-7.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:63cdeaac4ae85a179a8d6bc09b77b564c096250d759eed343a89d91bce8b6367"}, + {file = "coverage-7.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:20d1a2a76bb4eb00e4d36b9699f9b7aba93271c9c29220ad4c6a9581a0320235"}, + {file = "coverage-7.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ea748802cc0de4de92ef8244dd84ffd793bd2e7be784cd8394d557a3c751e21"}, + {file = "coverage-7.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21b154aba06df42e4b96fc915512ab39595105f6c483991287021ed95776d934"}, + {file = "coverage-7.2.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd214917cabdd6f673a29d708574e9fbdb892cb77eb426d0eae3490d95ca7859"}, + {file = "coverage-7.2.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2c2e58e45fe53fab81f85474e5d4d226eeab0f27b45aa062856c89389da2f0d9"}, + {file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:87ecc7c9a1a9f912e306997ffee020297ccb5ea388421fe62a2a02747e4d5539"}, + {file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:387065e420aed3c71b61af7e82c7b6bc1c592f7e3c7a66e9f78dd178699da4fe"}, + {file = "coverage-7.2.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ea3f5bc91d7d457da7d48c7a732beaf79d0c8131df3ab278e6bba6297e23c6c4"}, + 
{file = "coverage-7.2.3-cp39-cp39-win32.whl", hash = "sha256:ae7863a1d8db6a014b6f2ff9c1582ab1aad55a6d25bac19710a8df68921b6e30"}, + {file = "coverage-7.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:3f04becd4fcda03c0160d0da9c8f0c246bc78f2f7af0feea1ec0930e7c93fa4a"}, + {file = "coverage-7.2.3-pp37.pp38.pp39-none-any.whl", hash = "sha256:965ee3e782c7892befc25575fa171b521d33798132692df428a09efacaffe8d0"}, + {file = "coverage-7.2.3.tar.gz", hash = "sha256:d298c2815fa4891edd9abe5ad6e6cb4207104c7dd9fd13aea3fdebf6f9b91259"}, +] + +[package.extras] +toml = ["tomli"] + [[package]] name = "cryptography" version = "39.0.2" @@ -323,6 +613,24 @@ test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0 test-randomorder = ["pytest-randomly"] tox = ["tox"] +[[package]] +name = "deprecated" +version = "1.2.13" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +files = [ + {file = "Deprecated-1.2.13-py2.py3-none-any.whl", hash = "sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d"}, + {file = "Deprecated-1.2.13.tar.gz", hash = "sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d"}, +] + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["PyTest", "PyTest (<5)", "PyTest-Cov", "PyTest-Cov (<2.6)", "bump2version (<1)", "configparser (<5)", "importlib-metadata (<3)", "importlib-resources (<4)", "sphinx (<2)", "sphinxcontrib-websupport (<2)", "tox", "zipp (<2)"] + [[package]] name = "dill" version = "0.3.6" @@ -352,19 +660,19 @@ files = [ [[package]] name = "filelock" -version = "3.10.7" +version = "3.12.0" description = "A platform independent file lock." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "filelock-3.10.7-py3-none-any.whl", hash = "sha256:bde48477b15fde2c7e5a0713cbe72721cb5a5ad32ee0b8f419907960b9d75536"}, - {file = "filelock-3.10.7.tar.gz", hash = "sha256:892be14aa8efc01673b5ed6589dbccb95f9a8596f0507e232626155495c18105"}, + {file = "filelock-3.12.0-py3-none-any.whl", hash = "sha256:ad98852315c2ab702aeb628412cbf7e95b7ce8c3bf9565670b4eaecf1db370a9"}, + {file = "filelock-3.12.0.tar.gz", hash = "sha256:fc03ae43288c013d2ea83c8597001b1129db351aad9c57fe2409327916b8e718"}, ] [package.extras] -docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.2.2)", "diff-cover (>=7.5)", "pytest (>=7.2.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.3.27)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] [[package]] name = "flask" @@ -379,6 +687,7 @@ files = [ ] [package.dependencies] +asgiref = {version = ">=3.2", optional = true, markers = "extra == \"async\""} click = ">=8.0" itsdangerous = ">=2.0" Jinja2 = ">=3.0" @@ -402,24 +711,6 @@ files = [ [package.dependencies] Flask = "*" -[[package]] -name = "flask-executor" -version = "1.0.0" -description = "An easy to use Flask wrapper for concurrent.futures" -category = "main" -optional = false -python-versions = "*" -files = [ - {file = "Flask-Executor-1.0.0.tar.gz", hash = "sha256:4bc113def5d9f1c7ff272ff7ba09f843468eab80469bdbd21625a111b2c8ae7b"}, - {file = "Flask_Executor-1.0.0-py3-none-any.whl", hash = "sha256:c044dc6393326a83351e69e4ddf3686270abffeac2b599260cebf0ea5d1e93de"}, -] - -[package.dependencies] -Flask = "*" - -[package.extras] -test = ["flask-sqlalchemy", "pytest"] - [[package]] name = 
"flask-providers-oidc" version = "1.2.1" @@ -436,6 +727,159 @@ files = [ oauth2client = ">=4.1.3,<5.0.0" PyJWT = ">=2.6.0,<3.0.0" +[[package]] +name = "Flask-Pydantic" +version = "0.11.0" +description = "Flask extension for integration with Pydantic library" +category = "main" +optional = false +python-versions = ">=3.6" +files = [] +develop = false + +[package.dependencies] +Flask = "*" +pydantic = ">=1.7" +typing-extensions = ">=4.1.1" + +[package.source] +type = "git" +url = "https://github.com/MarekPikula/flask-pydantic.git" +reference = "dictable_models" +resolved_reference = "b85358318fb600f00ca8891437d573809d0c61b4" + +[[package]] +name = "frozenlist" +version = "1.3.3" +description = "A list-like structure which implements collections.abc.MutableSequence" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ff8bf625fe85e119553b5383ba0fb6aa3d0ec2ae980295aaefa552374926b3f4"}, + {file = "frozenlist-1.3.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dfbac4c2dfcc082fcf8d942d1e49b6aa0766c19d3358bd86e2000bf0fa4a9cf0"}, + {file = "frozenlist-1.3.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b1c63e8d377d039ac769cd0926558bb7068a1f7abb0f003e3717ee003ad85530"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fdfc24dcfce5b48109867c13b4cb15e4660e7bd7661741a391f821f23dfdca7"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2c926450857408e42f0bbc295e84395722ce74bae69a3b2aa2a65fe22cb14b99"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1841e200fdafc3d51f974d9d377c079a0694a8f06de2e67b48150328d66d5483"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:f470c92737afa7d4c3aacc001e335062d582053d4dbe73cda126f2d7031068dd"}, + {file = "frozenlist-1.3.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:783263a4eaad7c49983fe4b2e7b53fa9770c136c270d2d4bbb6d2192bf4d9caf"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:924620eef691990dfb56dc4709f280f40baee568c794b5c1885800c3ecc69816"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ae4dc05c465a08a866b7a1baf360747078b362e6a6dbeb0c57f234db0ef88ae0"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:bed331fe18f58d844d39ceb398b77d6ac0b010d571cba8267c2e7165806b00ce"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:02c9ac843e3390826a265e331105efeab489ffaf4dd86384595ee8ce6d35ae7f"}, + {file = "frozenlist-1.3.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:9545a33965d0d377b0bc823dcabf26980e77f1b6a7caa368a365a9497fb09420"}, + {file = "frozenlist-1.3.3-cp310-cp310-win32.whl", hash = "sha256:d5cd3ab21acbdb414bb6c31958d7b06b85eeb40f66463c264a9b343a4e238642"}, + {file = "frozenlist-1.3.3-cp310-cp310-win_amd64.whl", hash = "sha256:b756072364347cb6aa5b60f9bc18e94b2f79632de3b0190253ad770c5df17db1"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b4395e2f8d83fbe0c627b2b696acce67868793d7d9750e90e39592b3626691b7"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:14143ae966a6229350021384870458e4777d1eae4c28d1a7aa47f24d030e6678"}, + {file = "frozenlist-1.3.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5d8860749e813a6f65bad8285a0520607c9500caa23fea6ee407e63debcdbef6"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23d16d9f477bb55b6154654e0e74557040575d9d19fe78a161bd33d7d76808e8"}, + {file = 
"frozenlist-1.3.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:eb82dbba47a8318e75f679690190c10a5e1f447fbf9df41cbc4c3afd726d88cb"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9309869032abb23d196cb4e4db574232abe8b8be1339026f489eeb34a4acfd91"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a97b4fe50b5890d36300820abd305694cb865ddb7885049587a5678215782a6b"}, + {file = "frozenlist-1.3.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c188512b43542b1e91cadc3c6c915a82a5eb95929134faf7fd109f14f9892ce4"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:303e04d422e9b911a09ad499b0368dc551e8c3cd15293c99160c7f1f07b59a48"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0771aed7f596c7d73444c847a1c16288937ef988dc04fb9f7be4b2aa91db609d"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:66080ec69883597e4d026f2f71a231a1ee9887835902dbe6b6467d5a89216cf6"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:41fe21dc74ad3a779c3d73a2786bdf622ea81234bdd4faf90b8b03cad0c2c0b4"}, + {file = "frozenlist-1.3.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f20380df709d91525e4bee04746ba612a4df0972c1b8f8e1e8af997e678c7b81"}, + {file = "frozenlist-1.3.3-cp311-cp311-win32.whl", hash = "sha256:f30f1928162e189091cf4d9da2eac617bfe78ef907a761614ff577ef4edfb3c8"}, + {file = "frozenlist-1.3.3-cp311-cp311-win_amd64.whl", hash = "sha256:a6394d7dadd3cfe3f4b3b186e54d5d8504d44f2d58dcc89d693698e8b7132b32"}, + {file = "frozenlist-1.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8df3de3a9ab8325f94f646609a66cbeeede263910c5c0de0101079ad541af332"}, + {file = 
"frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0693c609e9742c66ba4870bcee1ad5ff35462d5ffec18710b4ac89337ff16e27"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cd4210baef299717db0a600d7a3cac81d46ef0e007f88c9335db79f8979c0d3d"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:394c9c242113bfb4b9aa36e2b80a05ffa163a30691c7b5a29eba82e937895d5e"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6327eb8e419f7d9c38f333cde41b9ae348bec26d840927332f17e887a8dcb70d"}, + {file = "frozenlist-1.3.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2e24900aa13212e75e5b366cb9065e78bbf3893d4baab6052d1aca10d46d944c"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:3843f84a6c465a36559161e6c59dce2f2ac10943040c2fd021cfb70d58c4ad56"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:84610c1502b2461255b4c9b7d5e9c48052601a8957cd0aea6ec7a7a1e1fb9420"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:c21b9aa40e08e4f63a2f92ff3748e6b6c84d717d033c7b3438dd3123ee18f70e"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:efce6ae830831ab6a22b9b4091d411698145cb9b8fc869e1397ccf4b4b6455cb"}, + {file = "frozenlist-1.3.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:40de71985e9042ca00b7953c4f41eabc3dc514a2d1ff534027f091bc74416401"}, + {file = "frozenlist-1.3.3-cp37-cp37m-win32.whl", hash = "sha256:180c00c66bde6146a860cbb81b54ee0df350d2daf13ca85b275123bbf85de18a"}, + {file = "frozenlist-1.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9bbbcedd75acdfecf2159663b87f1bb5cfc80e7cd99f7ddd9d66eb98b14a8411"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_universal2.whl", 
hash = "sha256:034a5c08d36649591be1cbb10e09da9f531034acfe29275fc5454a3b101ce41a"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ba64dc2b3b7b158c6660d49cdb1d872d1d0bf4e42043ad8d5006099479a194e5"}, + {file = "frozenlist-1.3.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:47df36a9fe24054b950bbc2db630d508cca3aa27ed0566c0baf661225e52c18e"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:008a054b75d77c995ea26629ab3a0c0d7281341f2fa7e1e85fa6153ae29ae99c"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:841ea19b43d438a80b4de62ac6ab21cfe6827bb8a9dc62b896acc88eaf9cecba"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e235688f42b36be2b6b06fc37ac2126a73b75fb8d6bc66dd632aa35286238703"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca713d4af15bae6e5d79b15c10c8522859a9a89d3b361a50b817c98c2fb402a2"}, + {file = "frozenlist-1.3.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ac5995f2b408017b0be26d4a1d7c61bce106ff3d9e3324374d66b5964325448"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a4ae8135b11652b08a8baf07631d3ebfe65a4c87909dbef5fa0cdde440444ee4"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4ea42116ceb6bb16dbb7d526e242cb6747b08b7710d9782aa3d6732bd8d27649"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:810860bb4bdce7557bc0febb84bbd88198b9dbc2022d8eebe5b3590b2ad6c842"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ee78feb9d293c323b59a6f2dd441b63339a30edf35abcb51187d2fc26e696d13"}, + {file = "frozenlist-1.3.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:0af2e7c87d35b38732e810befb9d797a99279cbb85374d42ea61c1e9d23094b3"}, + {file = "frozenlist-1.3.3-cp38-cp38-win32.whl", hash = "sha256:899c5e1928eec13fd6f6d8dc51be23f0d09c5281e40d9cf4273d188d9feeaf9b"}, + {file = "frozenlist-1.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:7f44e24fa70f6fbc74aeec3e971f60a14dde85da364aa87f15d1be94ae75aeef"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2b07ae0c1edaa0a36339ec6cce700f51b14a3fc6545fdd32930d2c83917332cf"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ebb86518203e12e96af765ee89034a1dbb0c3c65052d1b0c19bbbd6af8a145e1"}, + {file = "frozenlist-1.3.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5cf820485f1b4c91e0417ea0afd41ce5cf5965011b3c22c400f6d144296ccbc0"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c11e43016b9024240212d2a65043b70ed8dfd3b52678a1271972702d990ac6d"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8fa3c6e3305aa1146b59a09b32b2e04074945ffcfb2f0931836d103a2c38f936"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:352bd4c8c72d508778cf05ab491f6ef36149f4d0cb3c56b1b4302852255d05d5"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65a5e4d3aa679610ac6e3569e865425b23b372277f89b5ef06cf2cdaf1ebf22b"}, + {file = "frozenlist-1.3.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e2c1185858d7e10ff045c496bbf90ae752c28b365fef2c09cf0fa309291669"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f163d2fd041c630fed01bc48d28c3ed4a3b003c00acd396900e11ee5316b56bb"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:05cdb16d09a0832eedf770cb7bd1fe57d8cf4eaf5aced29c4e41e3f20b30a784"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:8bae29d60768bfa8fb92244b74502b18fae55a80eac13c88eb0b496d4268fd2d"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:eedab4c310c0299961ac285591acd53dc6723a1ebd90a57207c71f6e0c2153ab"}, + {file = "frozenlist-1.3.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3bbdf44855ed8f0fbcd102ef05ec3012d6a4fd7c7562403f76ce6a52aeffb2b1"}, + {file = "frozenlist-1.3.3-cp39-cp39-win32.whl", hash = "sha256:efa568b885bca461f7c7b9e032655c0c143d305bf01c30caf6db2854a4532b38"}, + {file = "frozenlist-1.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:cfe33efc9cb900a4c46f91a5ceba26d6df370ffddd9ca386eb1d4f0ad97b9ea9"}, + {file = "frozenlist-1.3.3.tar.gz", hash = "sha256:58bcc55721e8a90b88332d6cd441261ebb22342e238296bb330968952fbb3a6a"}, +] + +[[package]] +name = "gitdb" +version = "4.0.10" +description = "Git Object Database" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "gitdb-4.0.10-py3-none-any.whl", hash = "sha256:c286cf298426064079ed96a9e4a9d39e7f3e9bf15ba60701e95f5492f28415c7"}, + {file = "gitdb-4.0.10.tar.gz", hash = "sha256:6eb990b69df4e15bad899ea868dc46572c3f75339735663b81de79b06f17eb9a"}, +] + +[package.dependencies] +smmap = ">=3.0.1,<6" + +[[package]] +name = "gitpython" +version = "3.1.31" +description = "GitPython is a Python library used to interact with Git repositories" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "GitPython-3.1.31-py3-none-any.whl", hash = "sha256:f04893614f6aa713a60cbbe1e6a97403ef633103cdd0ef5eb6efe0deb98dbe8d"}, + {file = "GitPython-3.1.31.tar.gz", hash = "sha256:8ce3bcf69adfdf7c7d503e78fd3b1c492af782d58893b650adb2ac8912ddd573"}, +] + +[package.dependencies] +gitdb = ">=4.0.1,<5" + +[[package]] +name = "grpclib" +version = "0.4.3" +description = "Pure-Python gRPC implementation for asyncio" 
+category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "grpclib-0.4.3.tar.gz", hash = "sha256:eadf2002fc5a25158b707c0338a6c0b96dd7fbdc6df66f7e515e7f041d56a940"}, +] + +[package.dependencies] +h2 = ">=3.1.0,<5" +multidict = "*" + +[package.extras] +protobuf = ["protobuf (>=3.15.0)"] + [[package]] name = "gunicorn" version = "20.1.0" @@ -457,6 +901,56 @@ gevent = ["gevent (>=1.4.0)"] setproctitle = ["setproctitle"] tornado = ["tornado (>=0.2)"] +[[package]] +name = "h2" +version = "4.1.0" +description = "HTTP/2 State-Machine based protocol implementation" +category = "main" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, + {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, +] + +[package.dependencies] +hpack = ">=4.0,<5" +hyperframe = ">=6.0,<7" + +[[package]] +name = "headscale-api" +version = "0.2.0" +description = "Python Headscale API and configuration abstraction." +category = "main" +optional = false +python-versions = "^3.11" # TODO: Change to 3.7 once datetime parsing is fixed. 
+files = [] +develop = false + +[package.dependencies] +aiohttp = "^3.8.4" +betterproto = {version = "2.0.0b5", extras = ["compiler"]} +pydantic = "^1.10.7" +pydantic-yaml = {version = "^0.11.2", extras = ["ruamel"]} + +[package.source] +type = "git" +url = "https://github.com/MarekPikula/python-headscale-api.git" +reference = "HEAD" +resolved_reference = "ea01ea4ce22b82fb9f2a58855dfee68e72cdef02" + +[[package]] +name = "hpack" +version = "4.0.0" +description = "Pure-Python HPACK header compression" +category = "main" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, + {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, +] + [[package]] name = "httplib2" version = "0.22.0" @@ -472,6 +966,18 @@ files = [ [package.dependencies] pyparsing = {version = ">=2.4.2,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.0.2 || >3.0.2,<3.0.3 || >3.0.3,<4", markers = "python_version > \"3.0\""} +[[package]] +name = "hyperframe" +version = "6.0.1" +description = "HTTP/2 framing layer for Python" +category = "main" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, + {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, +] + [[package]] name = "identify" version = "2.5.22" @@ -499,11 +1005,31 @@ files = [ {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, ] +[[package]] +name = "importlib-metadata" +version = "6.5.0" +description = "Read metadata from Python packages" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "importlib_metadata-6.5.0-py3-none-any.whl", hash = 
"sha256:03ba783c3a2c69d751b109fc0c94a62c51f581b3d6acf8ed1331b6d5729321ff"}, + {file = "importlib_metadata-6.5.0.tar.gz", hash = "sha256:7a8bdf1bc3a726297f5cfbc999e6e7ff6b4fa41b26bba4afc580448624460045"}, +] + +[package.dependencies] +zipp = ">=0.5" + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +perf = ["ipython"] +testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] + [[package]] name = "isort" version = "5.12.0" description = "A Python utility / library to sort Python imports." -category = "dev" +category = "main" optional = false python-versions = ">=3.8.0" files = [ @@ -665,40 +1191,124 @@ files = [ {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] +[[package]] +name = "multidict" +version = "6.0.4" +description = "multidict implementation" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, + {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, + {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash 
= "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, + {file = "multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, + {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, + {file = 
"multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, + {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, + {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = 
"sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, + {file = 
"multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, + {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, + {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = 
"sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, + {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, + {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, + {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, +] + [[package]] name = "mypy" -version = "1.1.1" +version = "1.2.0" description = "Optional static typing for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "mypy-1.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39c7119335be05630611ee798cc982623b9e8f0cff04a0b48dfc26100e0b97af"}, - {file = "mypy-1.1.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:61bf08362e93b6b12fad3eab68c4ea903a077b87c90ac06c11e3d7a09b56b9c1"}, - {file = "mypy-1.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dbb19c9f662e41e474e0cff502b7064a7edc6764f5262b6cd91d698163196799"}, - {file = "mypy-1.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:315ac73cc1cce4771c27d426b7ea558fb4e2836f89cb0296cbe056894e3a1f78"}, - {file = "mypy-1.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:5cb14ff9919b7df3538590fc4d4c49a0f84392237cbf5f7a816b4161c061829e"}, - {file = "mypy-1.1.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:26cdd6a22b9b40b2fd71881a8a4f34b4d7914c679f154f43385ca878a8297389"}, - {file = 
"mypy-1.1.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5b5f81b40d94c785f288948c16e1f2da37203c6006546c5d947aab6f90aefef2"}, - {file = "mypy-1.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21b437be1c02712a605591e1ed1d858aba681757a1e55fe678a15c2244cd68a5"}, - {file = "mypy-1.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d809f88734f44a0d44959d795b1e6f64b2bbe0ea4d9cc4776aa588bb4229fc1c"}, - {file = "mypy-1.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:a380c041db500e1410bb5b16b3c1c35e61e773a5c3517926b81dfdab7582be54"}, - {file = "mypy-1.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b7c7b708fe9a871a96626d61912e3f4ddd365bf7f39128362bc50cbd74a634d5"}, - {file = "mypy-1.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1c10fa12df1232c936830839e2e935d090fc9ee315744ac33b8a32216b93707"}, - {file = "mypy-1.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0a28a76785bf57655a8ea5eb0540a15b0e781c807b5aa798bd463779988fa1d5"}, - {file = "mypy-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:ef6a01e563ec6a4940784c574d33f6ac1943864634517984471642908b30b6f7"}, - {file = "mypy-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d64c28e03ce40d5303450f547e07418c64c241669ab20610f273c9e6290b4b0b"}, - {file = "mypy-1.1.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:64cc3afb3e9e71a79d06e3ed24bb508a6d66f782aff7e56f628bf35ba2e0ba51"}, - {file = "mypy-1.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce61663faf7a8e5ec6f456857bfbcec2901fbdb3ad958b778403f63b9e606a1b"}, - {file = "mypy-1.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2b0c373d071593deefbcdd87ec8db91ea13bd8f1328d44947e88beae21e8d5e9"}, - {file = "mypy-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:2888ce4fe5aae5a673386fa232473014056967f3904f5abfcf6367b5af1f612a"}, - {file = "mypy-1.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:19ba15f9627a5723e522d007fe708007bae52b93faab00f95d72f03e1afa9598"}, - {file = "mypy-1.1.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:59bbd71e5c58eed2e992ce6523180e03c221dcd92b52f0e792f291d67b15a71c"}, - {file = "mypy-1.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9401e33814cec6aec8c03a9548e9385e0e228fc1b8b0a37b9ea21038e64cdd8a"}, - {file = "mypy-1.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4b398d8b1f4fba0e3c6463e02f8ad3346f71956b92287af22c9b12c3ec965a9f"}, - {file = "mypy-1.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:69b35d1dcb5707382810765ed34da9db47e7f95b3528334a3c999b0c90fe523f"}, - {file = "mypy-1.1.1-py3-none-any.whl", hash = "sha256:4e4e8b362cdf99ba00c2b218036002bdcdf1e0de085cdb296a49df03fb31dfc4"}, - {file = "mypy-1.1.1.tar.gz", hash = "sha256:ae9ceae0f5b9059f33dbc62dea087e942c0ccab4b7a003719cb70f9b8abfa32f"}, + {file = "mypy-1.2.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:701189408b460a2ff42b984e6bd45c3f41f0ac9f5f58b8873bbedc511900086d"}, + {file = "mypy-1.2.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fe91be1c51c90e2afe6827601ca14353bbf3953f343c2129fa1e247d55fd95ba"}, + {file = "mypy-1.2.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d26b513225ffd3eacece727f4387bdce6469192ef029ca9dd469940158bc89e"}, + {file = "mypy-1.2.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3a2d219775a120581a0ae8ca392b31f238d452729adbcb6892fa89688cb8306a"}, + {file = "mypy-1.2.0-cp310-cp310-win_amd64.whl", hash = "sha256:2e93a8a553e0394b26c4ca683923b85a69f7ccdc0139e6acd1354cc884fe0128"}, + {file = "mypy-1.2.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3efde4af6f2d3ccf58ae825495dbb8d74abd6d176ee686ce2ab19bd025273f41"}, + {file = "mypy-1.2.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:695c45cea7e8abb6f088a34a6034b1d273122e5530aeebb9c09626cea6dca4cb"}, + {file = "mypy-1.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:d0e9464a0af6715852267bf29c9553e4555b61f5904a4fc538547a4d67617937"}, + {file = "mypy-1.2.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:8293a216e902ac12779eb7a08f2bc39ec6c878d7c6025aa59464e0c4c16f7eb9"}, + {file = "mypy-1.2.0-cp311-cp311-win_amd64.whl", hash = "sha256:f46af8d162f3d470d8ffc997aaf7a269996d205f9d746124a179d3abe05ac602"}, + {file = "mypy-1.2.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:031fc69c9a7e12bcc5660b74122ed84b3f1c505e762cc4296884096c6d8ee140"}, + {file = "mypy-1.2.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:390bc685ec209ada4e9d35068ac6988c60160b2b703072d2850457b62499e336"}, + {file = "mypy-1.2.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:4b41412df69ec06ab141808d12e0bf2823717b1c363bd77b4c0820feaa37249e"}, + {file = "mypy-1.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4e4a682b3f2489d218751981639cffc4e281d548f9d517addfd5a2917ac78119"}, + {file = "mypy-1.2.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a197ad3a774f8e74f21e428f0de7f60ad26a8d23437b69638aac2764d1e06a6a"}, + {file = "mypy-1.2.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:c9a084bce1061e55cdc0493a2ad890375af359c766b8ac311ac8120d3a472950"}, + {file = "mypy-1.2.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eaeaa0888b7f3ccb7bcd40b50497ca30923dba14f385bde4af78fac713d6d6f6"}, + {file = "mypy-1.2.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bea55fc25b96c53affab852ad94bf111a3083bc1d8b0c76a61dd101d8a388cf5"}, + {file = "mypy-1.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:4c8d8c6b80aa4a1689f2a179d31d86ae1367ea4a12855cc13aa3ba24bb36b2d8"}, + {file = "mypy-1.2.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:70894c5345bea98321a2fe84df35f43ee7bb0feec117a71420c60459fc3e1eed"}, + {file = "mypy-1.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4a99fe1768925e4a139aace8f3fb66db3576ee1c30b9c0f70f744ead7e329c9f"}, + {file = 
"mypy-1.2.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:023fe9e618182ca6317ae89833ba422c411469156b690fde6a315ad10695a521"}, + {file = "mypy-1.2.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:4d19f1a239d59f10fdc31263d48b7937c585810288376671eaf75380b074f238"}, + {file = "mypy-1.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:2de7babe398cb7a85ac7f1fd5c42f396c215ab3eff731b4d761d68d0f6a80f48"}, + {file = "mypy-1.2.0-py3-none-any.whl", hash = "sha256:d8e9187bfcd5ffedbe87403195e1fc340189a68463903c39e2b63307c9fa0394"}, + {file = "mypy-1.2.0.tar.gz", hash = "sha256:f70a40410d774ae23fcb4afbbeca652905a04de7948eaf0b1789c8d1426b72d1"}, ] [package.dependencies] @@ -715,7 +1325,7 @@ reports = ["lxml"] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." -category = "dev" +category = "main" optional = false python-versions = ">=3.5" files = [ @@ -759,21 +1369,21 @@ six = ">=1.6.1" [[package]] name = "packaging" -version = "23.0" +version = "23.1" description = "Core utilities for Python packages" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, - {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, + {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, + {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, ] [[package]] name = "pathspec" version = "0.11.1" description = "Utility library for gitignore style pattern matching of file paths." 
-category = "dev" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -785,7 +1395,7 @@ files = [ name = "platformdirs" version = "3.2.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." -category = "dev" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -799,14 +1409,14 @@ test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.2.2)", "pytest- [[package]] name = "pre-commit" -version = "3.2.1" +version = "3.2.2" description = "A framework for managing and maintaining multi-language pre-commit hooks." category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "pre_commit-3.2.1-py2.py3-none-any.whl", hash = "sha256:a06a7fcce7f420047a71213c175714216498b49ebc81fe106f7716ca265f5bb6"}, - {file = "pre_commit-3.2.1.tar.gz", hash = "sha256:b5aee7d75dbba21ee161ba641b01e7ae10c5b91967ebf7b2ab0dfae12d07e1f1"}, + {file = "pre_commit-3.2.2-py2.py3-none-any.whl", hash = "sha256:0b4210aea813fe81144e87c5a291f09ea66f199f367fa1df41b55e1d26e1e2b4"}, + {file = "pre_commit-3.2.2.tar.gz", hash = "sha256:5b808fcbda4afbccf6d6633a56663fed35b6c2bc08096fd3d47ce197ac351d9d"}, ] [package.dependencies] @@ -818,30 +1428,30 @@ virtualenv = ">=20.10.0" [[package]] name = "pyasn1" -version = "0.4.8" -description = "ASN.1 types and codecs" +version = "0.5.0" +description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" category = "main" optional = false -python-versions = "*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "pyasn1-0.4.8-py2.py3-none-any.whl", hash = "sha256:39c7e2ec30515947ff4e87fb6f456dfc6e84857d34be479c9d4a4ba4bf46aa5d"}, - {file = "pyasn1-0.4.8.tar.gz", hash = "sha256:aef77c9fb94a3ac588e87841208bdec464471d9871bd5050a287cc9a475cd0ba"}, + {file = "pyasn1-0.5.0-py2.py3-none-any.whl", hash = "sha256:87a2121042a1ac9358cabcaf1d07680ff97ee6404333bacca15f76aa8ad01a57"}, + 
{file = "pyasn1-0.5.0.tar.gz", hash = "sha256:97b7290ca68e62a832558ec3976f15cbf911bf5d7c7039d8b861c2a0ece69fde"}, ] [[package]] name = "pyasn1-modules" -version = "0.2.8" -description = "A collection of ASN.1-based protocols modules." +version = "0.3.0" +description = "A collection of ASN.1-based protocols modules" category = "main" optional = false -python-versions = "*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "pyasn1-modules-0.2.8.tar.gz", hash = "sha256:905f84c712230b2c592c19470d3ca8d552de726050d1d1716282a1f6146be65e"}, - {file = "pyasn1_modules-0.2.8-py2.py3-none-any.whl", hash = "sha256:a50b808ffeb97cb3601dd25981f6b016cbb3d31fbf57a8b8a87428e6158d0c74"}, + {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, + {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, ] [package.dependencies] -pyasn1 = ">=0.4.6,<0.5.0" +pyasn1 = ">=0.4.6,<0.6.0" [[package]] name = "pycparser" @@ -855,6 +1465,85 @@ files = [ {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] +[[package]] +name = "pydantic" +version = "1.10.7" +description = "Data validation and settings management using python type hints" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e79e999e539872e903767c417c897e729e015872040e56b96e67968c3b918b2d"}, + {file = "pydantic-1.10.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:01aea3a42c13f2602b7ecbbea484a98169fb568ebd9e247593ea05f01b884b2e"}, + {file = "pydantic-1.10.7-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:516f1ed9bc2406a0467dd777afc636c7091d71f214d5e413d64fef45174cfc7a"}, + {file = 
"pydantic-1.10.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae150a63564929c675d7f2303008d88426a0add46efd76c3fc797cd71cb1b46f"}, + {file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:ecbbc51391248116c0a055899e6c3e7ffbb11fb5e2a4cd6f2d0b93272118a209"}, + {file = "pydantic-1.10.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f4a2b50e2b03d5776e7f21af73e2070e1b5c0d0df255a827e7c632962f8315af"}, + {file = "pydantic-1.10.7-cp310-cp310-win_amd64.whl", hash = "sha256:a7cd2251439988b413cb0a985c4ed82b6c6aac382dbaff53ae03c4b23a70e80a"}, + {file = "pydantic-1.10.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:68792151e174a4aa9e9fc1b4e653e65a354a2fa0fed169f7b3d09902ad2cb6f1"}, + {file = "pydantic-1.10.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe2507b8ef209da71b6fb5f4e597b50c5a34b78d7e857c4f8f3115effaef5fe"}, + {file = "pydantic-1.10.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10a86d8c8db68086f1e30a530f7d5f83eb0685e632e411dbbcf2d5c0150e8dcd"}, + {file = "pydantic-1.10.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75ae19d2a3dbb146b6f324031c24f8a3f52ff5d6a9f22f0683694b3afcb16fb"}, + {file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:464855a7ff7f2cc2cf537ecc421291b9132aa9c79aef44e917ad711b4a93163b"}, + {file = "pydantic-1.10.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:193924c563fae6ddcb71d3f06fa153866423ac1b793a47936656e806b64e24ca"}, + {file = "pydantic-1.10.7-cp311-cp311-win_amd64.whl", hash = "sha256:b4a849d10f211389502059c33332e91327bc154acc1845f375a99eca3afa802d"}, + {file = "pydantic-1.10.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cc1dde4e50a5fc1336ee0581c1612215bc64ed6d28d2c7c6f25d2fe3e7c3e918"}, + {file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e0cfe895a504c060e5d36b287ee696e2fdad02d89e0d895f83037245218a87fe"}, + {file = "pydantic-1.10.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:670bb4683ad1e48b0ecb06f0cfe2178dcf74ff27921cdf1606e527d2617a81ee"}, + {file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:950ce33857841f9a337ce07ddf46bc84e1c4946d2a3bba18f8280297157a3fd1"}, + {file = "pydantic-1.10.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:c15582f9055fbc1bfe50266a19771bbbef33dd28c45e78afbe1996fd70966c2a"}, + {file = "pydantic-1.10.7-cp37-cp37m-win_amd64.whl", hash = "sha256:82dffb306dd20bd5268fd6379bc4bfe75242a9c2b79fec58e1041fbbdb1f7914"}, + {file = "pydantic-1.10.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8c7f51861d73e8b9ddcb9916ae7ac39fb52761d9ea0df41128e81e2ba42886cd"}, + {file = "pydantic-1.10.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:6434b49c0b03a51021ade5c4daa7d70c98f7a79e95b551201fff682fc1661245"}, + {file = "pydantic-1.10.7-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64d34ab766fa056df49013bb6e79921a0265204c071984e75a09cbceacbbdd5d"}, + {file = "pydantic-1.10.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:701daea9ffe9d26f97b52f1d157e0d4121644f0fcf80b443248434958fd03dc3"}, + {file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:cf135c46099ff3f919d2150a948ce94b9ce545598ef2c6c7bf55dca98a304b52"}, + {file = "pydantic-1.10.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0f85904f73161817b80781cc150f8b906d521fa11e3cdabae19a581c3606209"}, + {file = "pydantic-1.10.7-cp38-cp38-win_amd64.whl", hash = "sha256:9f6f0fd68d73257ad6685419478c5aece46432f4bdd8d32c7345f1986496171e"}, + {file = "pydantic-1.10.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c230c0d8a322276d6e7b88c3f7ce885f9ed16e0910354510e0bae84d54991143"}, + {file = "pydantic-1.10.7-cp39-cp39-macosx_11_0_arm64.whl", 
hash = "sha256:976cae77ba6a49d80f461fd8bba183ff7ba79f44aa5cfa82f1346b5626542f8e"}, + {file = "pydantic-1.10.7-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d45fc99d64af9aaf7e308054a0067fdcd87ffe974f2442312372dfa66e1001d"}, + {file = "pydantic-1.10.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d2a5ebb48958754d386195fe9e9c5106f11275867051bf017a8059410e9abf1f"}, + {file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:abfb7d4a7cd5cc4e1d1887c43503a7c5dd608eadf8bc615413fc498d3e4645cd"}, + {file = "pydantic-1.10.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:80b1fab4deb08a8292d15e43a6edccdffa5377a36a4597bb545b93e79c5ff0a5"}, + {file = "pydantic-1.10.7-cp39-cp39-win_amd64.whl", hash = "sha256:d71e69699498b020ea198468e2480a2f1e7433e32a3a99760058c6520e2bea7e"}, + {file = "pydantic-1.10.7-py3-none-any.whl", hash = "sha256:0cd181f1d0b1d00e2b705f1bf1ac7799a2d938cce3376b8007df62b29be3c2c6"}, + {file = "pydantic-1.10.7.tar.gz", hash = "sha256:cfc83c0678b6ba51b0532bea66860617c4cd4251ecf76e9846fa5a9f3454e97e"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" + +[package.extras] +dotenv = ["python-dotenv (>=0.10.4)"] +email = ["email-validator (>=1.0.3)"] + +[[package]] +name = "pydantic-yaml" +version = "0.11.2" +description = "Adds some YAML functionality to the excellent `pydantic` library." 
+category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic_yaml-0.11.2-py3-none-any.whl", hash = "sha256:0f70235472861985eaca3fe6c71d86329556c296052ac522c5ebc7322e0749f3"}, + {file = "pydantic_yaml-0.11.2.tar.gz", hash = "sha256:19c8f3c9a97041b0a3d8fc06ca5143ff71c0846c45b39fde719cfbc98be7a00c"}, +] + +[package.dependencies] +deprecated = ">=1.2.5,<1.3.0" +importlib-metadata = "*" +pydantic = ">=1.8,<2" +"ruamel.yaml" = {version = ">=0.15,<0.18", optional = true, markers = "extra == \"ruamel\""} +types-Deprecated = "*" + +[package.extras] +dev = ["black (==23.3.0)", "flake8", "mypy (==1.0.0)", "pre-commit (==2.21.0)", "pytest (==7.2.2)", "setuptools (>=61.0.0)", "setuptools-scm[toml] (>=6.2)"] +docs = ["mkdocs", "mkdocs-material", "mkdocstrings[python]", "pygments", "pymdown-extensions"] +pyyaml = ["pyyaml", "types-PyYAML"] +ruamel = ["ruamel.yaml (>=0.15,<0.18)"] +semver = ["semver (>=2.13.0,<4)"] + [[package]] name = "pydocstyle" version = "6.3.0" @@ -893,18 +1582,18 @@ tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] [[package]] name = "pylint" -version = "2.17.1" +version = "2.17.2" description = "python code static checker" category = "dev" optional = false python-versions = ">=3.7.2" files = [ - {file = "pylint-2.17.1-py3-none-any.whl", hash = "sha256:8660a54e3f696243d644fca98f79013a959c03f979992c1ab59c24d3f4ec2700"}, - {file = "pylint-2.17.1.tar.gz", hash = "sha256:d4d009b0116e16845533bc2163493d6681846ac725eab8ca8014afb520178ddd"}, + {file = "pylint-2.17.2-py3-none-any.whl", hash = "sha256:001cc91366a7df2970941d7e6bbefcbf98694e00102c1f121c531a814ddc2ea8"}, + {file = "pylint-2.17.2.tar.gz", hash = "sha256:1b647da5249e7c279118f657ca28b6aaebb299f86bf92affc632acf199f7adbb"}, ] [package.dependencies] -astroid = ">=2.15.0,<=2.17.0-dev0" +astroid = ">=2.15.2,<=2.17.0-dev0" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} dill = {version = ">=0.3.6", markers = "python_version >= \"3.11\""} 
isort = ">=4.2.5,<6" @@ -916,6 +1605,37 @@ tomlkit = ">=0.10.1" spelling = ["pyenchant (>=3.2,<4.0)"] testutils = ["gitpython (>3)"] +[[package]] +name = "pylint-plugin-utils" +version = "0.7" +description = "Utilities and helpers for writing Pylint plugins" +category = "dev" +optional = false +python-versions = ">=3.6.2" +files = [ + {file = "pylint-plugin-utils-0.7.tar.gz", hash = "sha256:ce48bc0516ae9415dd5c752c940dfe601b18fe0f48aa249f2386adfa95a004dd"}, + {file = "pylint_plugin_utils-0.7-py3-none-any.whl", hash = "sha256:b3d43e85ab74c4f48bb46ae4ce771e39c3a20f8b3d56982ab17aa73b4f98d535"}, +] + +[package.dependencies] +pylint = ">=1.7" + +[[package]] +name = "pylint-pydantic" +version = "0.1.8" +description = "A Pylint plugin to help Pylint understand the Pydantic" +category = "dev" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pylint_pydantic-0.1.8-py3-none-any.whl", hash = "sha256:4033c67e06885115fa3bb16e3b9ce918ac6439a87e9b4d314158e09bc1067ecb"}, +] + +[package.dependencies] +pydantic = "<2.0" +pylint = ">2.0,<3.0" +pylint-plugin-utils = "*" + [[package]] name = "pyparsing" version = "3.0.9" @@ -948,16 +1668,31 @@ six = ">=1.5" [[package]] name = "pytz" -version = "2022.7.1" +version = "2023.3" description = "World timezone definitions, modern and historical" category = "main" optional = false python-versions = "*" files = [ - {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"}, - {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"}, + {file = "pytz-2023.3-py2.py3-none-any.whl", hash = "sha256:a151b3abb88eda1d4e34a9814df37de2a80e301e68ba0fd856fb9b46bfbbbffb"}, + {file = "pytz-2023.3.tar.gz", hash = "sha256:1d8ce29db189191fb55338ee6d0387d82ab59f3d00eac103412d64e0ebd0c588"}, ] +[[package]] +name = "pytz-deprecation-shim" +version = "0.1.0.post0" +description = "Shims to make deprecation of pytz easier" 
+category = "main" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +files = [ + {file = "pytz_deprecation_shim-0.1.0.post0-py2.py3-none-any.whl", hash = "sha256:8314c9692a636c8eb3bda879b9f119e350e93223ae83e70e80c31675a0fdc1a6"}, + {file = "pytz_deprecation_shim-0.1.0.post0.tar.gz", hash = "sha256:af097bae1b616dde5c5744441e2ddc69e74dfdcb0c263129610d85b87445a59d"}, +] + +[package.dependencies] +tzdata = {version = "*", markers = "python_version >= \"3.6\""} + [[package]] name = "pyuwsgi" version = "2.0.21" @@ -1008,7 +1743,7 @@ files = [ name = "pyyaml" version = "6.0" description = "YAML parser and emitter for Python" -category = "main" +category = "dev" optional = false python-versions = ">=3.6" files = [ @@ -1091,6 +1826,22 @@ files = [ [package.dependencies] pyasn1 = ">=0.1.3" +[[package]] +name = "ruamel-yaml" +version = "0.17.21" +description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" +category = "main" +optional = false +python-versions = ">=3" +files = [ + {file = "ruamel.yaml-0.17.21-py3-none-any.whl", hash = "sha256:742b35d3d665023981bd6d16b3d24248ce5df75fdb4e2924e93a05c1f8b61ca7"}, + {file = "ruamel.yaml-0.17.21.tar.gz", hash = "sha256:8b7ce697a2f212752a35c1ac414471dc16c424c9573be4926b56ff3f5d23b7af"}, +] + +[package.extras] +docs = ["ryd"] +jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] + [[package]] name = "ruff" version = "0.0.260" @@ -1120,14 +1871,14 @@ files = [ [[package]] name = "setuptools" -version = "67.6.1" +version = "67.7.0" description = "Easily download, build, install, upgrade, and uninstall Python packages" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-67.6.1-py3-none-any.whl", hash = "sha256:e728ca814a823bf7bf60162daf9db95b93d532948c4c0bea762ce62f60189078"}, - {file = "setuptools-67.6.1.tar.gz", hash = 
"sha256:257de92a9d50a60b8e22abfcbb771571fde0dbf3ec234463212027a4eeecbe9a"}, + {file = "setuptools-67.7.0-py3-none-any.whl", hash = "sha256:888be97fde8cc3afd60f7784e678fa29ee13c4e5362daa7104a93bba33646c50"}, + {file = "setuptools-67.7.0.tar.gz", hash = "sha256:b7e53a01c6c654d26d2999ee033d8c6125e5fa55f03b7b193f937ae7ac999f22"}, ] [package.extras] @@ -1147,6 +1898,18 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "smmap" +version = "5.0.0" +description = "A pure Python implementation of a sliding window memory map manager" +category = "dev" +optional = false +python-versions = ">=3.6" +files = [ + {file = "smmap-5.0.0-py3-none-any.whl", hash = "sha256:2aba19d6a040e78d8b09de5c57e96207b09ed71d8e55ce0959eeee6c8e190d94"}, + {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, +] + [[package]] name = "snowballstemmer" version = "2.2.0" @@ -1171,11 +1934,50 @@ files = [ {file = "tomlkit-0.11.7.tar.gz", hash = "sha256:f392ef70ad87a672f02519f99967d28a4d3047133e2d1df936511465fbb3791d"}, ] +[[package]] +name = "types-deprecated" +version = "1.2.9.2" +description = "Typing stubs for Deprecated" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "types-Deprecated-1.2.9.2.tar.gz", hash = "sha256:91616fd6745f8bf2d457fbbbefd14cde43838e9f00a04b5a0eae4fc1f7bbc697"}, + {file = "types_Deprecated-1.2.9.2-py3-none-any.whl", hash = "sha256:327783e137353b0ef9cf47a8cd4b1c0b8ae72f6554eb25820783c6a81a3d556f"}, +] + +[[package]] +name = "types-requests" +version = "2.28.11.17" +description = "Typing stubs for requests" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "types-requests-2.28.11.17.tar.gz", hash = "sha256:0d580652ce903f643f8c3b494dd01d29367ea57cea0c7ad7f65cf3169092edb0"}, + {file = "types_requests-2.28.11.17-py3-none-any.whl", hash = 
"sha256:cc1aba862575019306b2ed134eb1ea994cab1c887a22e18d3383e6dd42e9789b"}, +] + +[package.dependencies] +types-urllib3 = "<1.27" + +[[package]] +name = "types-urllib3" +version = "1.26.25.10" +description = "Typing stubs for urllib3" +category = "dev" +optional = false +python-versions = "*" +files = [ + {file = "types-urllib3-1.26.25.10.tar.gz", hash = "sha256:c44881cde9fc8256d05ad6b21f50c4681eb20092552351570ab0a8a0653286d6"}, + {file = "types_urllib3-1.26.25.10-py3-none-any.whl", hash = "sha256:12c744609d588340a07e45d333bf870069fc8793bcf96bae7a96d4712a42591d"}, +] + [[package]] name = "typing-extensions" version = "4.5.0" description = "Backported and Experimental Type Hints for Python 3.7+" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" files = [ @@ -1183,6 +1985,37 @@ files = [ {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, ] +[[package]] +name = "tzdata" +version = "2023.3" +description = "Provider of IANA time zone data" +category = "main" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2023.3-py2.py3-none-any.whl", hash = "sha256:7e65763eef3120314099b6939b5546db7adce1e7d6f2e179e3df563c70511eda"}, + {file = "tzdata-2023.3.tar.gz", hash = "sha256:11ef1e08e54acb0d4f95bdb1be05da659673de4acbd21bf9c69e94cc5e907a3a"}, +] + +[[package]] +name = "tzlocal" +version = "4.3" +description = "tzinfo object for the local timezone" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tzlocal-4.3-py3-none-any.whl", hash = "sha256:b44c4388f3d34f25862cfbb387578a4d70fec417649da694a132f628a23367e2"}, + {file = "tzlocal-4.3.tar.gz", hash = "sha256:3f21d09e1b2aa9f2dacca12da240ca37de3ba5237a93addfd6d593afe9073355"}, +] + +[package.dependencies] +pytz-deprecation-shim = "*" +tzdata = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +devenv = ["black", "check-manifest", "flake8", 
"pyroma", "pytest (>=4.3)", "pytest-cov", "pytest-mock (>=3.3)", "zest.releaser"] + [[package]] name = "urllib3" version = "1.26.15" @@ -1202,24 +2035,24 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" -version = "20.21.0" +version = "20.22.0" description = "Virtual Python Environment builder" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "virtualenv-20.21.0-py3-none-any.whl", hash = "sha256:31712f8f2a17bd06234fa97fdf19609e789dd4e3e4bf108c3da71d710651adbc"}, - {file = "virtualenv-20.21.0.tar.gz", hash = "sha256:f50e3e60f990a0757c9b68333c9fdaa72d7188caa417f96af9e52407831a3b68"}, + {file = "virtualenv-20.22.0-py3-none-any.whl", hash = "sha256:48fd3b907b5149c5aab7c23d9790bea4cac6bc6b150af8635febc4cfeab1275a"}, + {file = "virtualenv-20.22.0.tar.gz", hash = "sha256:278753c47aaef1a0f14e6db8a4c5e1e040e90aea654d0fc1dc7e0d8a42616cc3"}, ] [package.dependencies] distlib = ">=0.3.6,<1" -filelock = ">=3.4.1,<4" -platformdirs = ">=2.4,<4" +filelock = ">=3.11,<4" +platformdirs = ">=3.2,<4" [package.extras] -docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] -test = ["covdefaults (>=2.2.2)", "coverage (>=7.1)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23)", "pytest (>=7.2.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"] +docs = ["furo (>=2023.3.27)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=22.12)"] +test = ["covdefaults (>=2.3)", "coverage (>=7.2.3)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.3.1)", "pytest-env (>=0.8.1)", "pytest-freezegun (>=0.4.2)", "pytest-mock (>=3.10)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)"] [[package]] name = "werkzeug" @@ -1243,7 
+2076,7 @@ watchdog = ["watchdog"] name = "wrapt" version = "1.15.0" description = "Module for decorators, wrappers and monkey patching." -category = "dev" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" files = [ @@ -1324,7 +2157,111 @@ files = [ {file = "wrapt-1.15.0.tar.gz", hash = "sha256:d06730c6aed78cee4126234cf2d071e01b44b915e725a6cb439a879ec9754a3a"}, ] +[[package]] +name = "yarl" +version = "1.8.2" +description = "Yet another URL library" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:bb81f753c815f6b8e2ddd2eef3c855cf7da193b82396ac013c661aaa6cc6b0a5"}, + {file = "yarl-1.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:47d49ac96156f0928f002e2424299b2c91d9db73e08c4cd6742923a086f1c863"}, + {file = "yarl-1.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3fc056e35fa6fba63248d93ff6e672c096f95f7836938241ebc8260e062832fe"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:58a3c13d1c3005dbbac5c9f0d3210b60220a65a999b1833aa46bd6677c69b08e"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10b08293cda921157f1e7c2790999d903b3fd28cd5c208cf8826b3b508026996"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de986979bbd87272fe557e0a8fcb66fd40ae2ddfe28a8b1ce4eae22681728fef"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c4fcfa71e2c6a3cb568cf81aadc12768b9995323186a10827beccf5fa23d4f8"}, + {file = "yarl-1.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae4d7ff1049f36accde9e1ef7301912a751e5bae0a9d142459646114c70ecba6"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = 
"sha256:bf071f797aec5b96abfc735ab97da9fd8f8768b43ce2abd85356a3127909d146"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:74dece2bfc60f0f70907c34b857ee98f2c6dd0f75185db133770cd67300d505f"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:df60a94d332158b444301c7f569659c926168e4d4aad2cfbf4bce0e8fb8be826"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:63243b21c6e28ec2375f932a10ce7eda65139b5b854c0f6b82ed945ba526bff3"}, + {file = "yarl-1.8.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cfa2bbca929aa742b5084fd4663dd4b87c191c844326fcb21c3afd2d11497f80"}, + {file = "yarl-1.8.2-cp310-cp310-win32.whl", hash = "sha256:b05df9ea7496df11b710081bd90ecc3a3db6adb4fee36f6a411e7bc91a18aa42"}, + {file = "yarl-1.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:24ad1d10c9db1953291f56b5fe76203977f1ed05f82d09ec97acb623a7976574"}, + {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2a1fca9588f360036242f379bfea2b8b44cae2721859b1c56d033adfd5893634"}, + {file = "yarl-1.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f37db05c6051eff17bc832914fe46869f8849de5b92dc4a3466cd63095d23dfd"}, + {file = "yarl-1.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77e913b846a6b9c5f767b14dc1e759e5aff05502fe73079f6f4176359d832581"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0978f29222e649c351b173da2b9b4665ad1feb8d1daa9d971eb90df08702668a"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:388a45dc77198b2460eac0aca1efd6a7c09e976ee768b0d5109173e521a19daf"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2305517e332a862ef75be8fad3606ea10108662bc6fe08509d5ca99503ac2aee"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:42430ff511571940d51e75cf42f1e4dbdded477e71c1b7a17f4da76c1da8ea76"}, + {file = "yarl-1.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3150078118f62371375e1e69b13b48288e44f6691c1069340081c3fd12c94d5b"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:c15163b6125db87c8f53c98baa5e785782078fbd2dbeaa04c6141935eb6dab7a"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:4d04acba75c72e6eb90745447d69f84e6c9056390f7a9724605ca9c56b4afcc6"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e7fd20d6576c10306dea2d6a5765f46f0ac5d6f53436217913e952d19237efc4"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:75c16b2a900b3536dfc7014905a128a2bea8fb01f9ee26d2d7d8db0a08e7cb2c"}, + {file = "yarl-1.8.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6d88056a04860a98341a0cf53e950e3ac9f4e51d1b6f61a53b0609df342cc8b2"}, + {file = "yarl-1.8.2-cp311-cp311-win32.whl", hash = "sha256:fb742dcdd5eec9f26b61224c23baea46c9055cf16f62475e11b9b15dfd5c117b"}, + {file = "yarl-1.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:8c46d3d89902c393a1d1e243ac847e0442d0196bbd81aecc94fcebbc2fd5857c"}, + {file = "yarl-1.8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ceff9722e0df2e0a9e8a79c610842004fa54e5b309fe6d218e47cd52f791d7ef"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3f6b4aca43b602ba0f1459de647af954769919c4714706be36af670a5f44c9c1"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1684a9bd9077e922300ecd48003ddae7a7474e0412bea38d4631443a91d61077"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ebb78745273e51b9832ef90c0898501006670d6e059f2cdb0e999494eb1450c2"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3adeef150d528ded2a8e734ebf9ae2e658f4c49bf413f5f157a470e17a4a2e89"}, + {file = "yarl-1.8.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57a7c87927a468e5a1dc60c17caf9597161d66457a34273ab1760219953f7f4c"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:efff27bd8cbe1f9bd127e7894942ccc20c857aa8b5a0327874f30201e5ce83d0"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a783cd344113cb88c5ff7ca32f1f16532a6f2142185147822187913eb989f739"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:705227dccbe96ab02c7cb2c43e1228e2826e7ead880bb19ec94ef279e9555b5b"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:34c09b43bd538bf6c4b891ecce94b6fa4f1f10663a8d4ca589a079a5018f6ed7"}, + {file = "yarl-1.8.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a48f4f7fea9a51098b02209d90297ac324241bf37ff6be6d2b0149ab2bd51b37"}, + {file = "yarl-1.8.2-cp37-cp37m-win32.whl", hash = "sha256:0414fd91ce0b763d4eadb4456795b307a71524dbacd015c657bb2a39db2eab89"}, + {file = "yarl-1.8.2-cp37-cp37m-win_amd64.whl", hash = "sha256:d881d152ae0007809c2c02e22aa534e702f12071e6b285e90945aa3c376463c5"}, + {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5df5e3d04101c1e5c3b1d69710b0574171cc02fddc4b23d1b2813e75f35a30b1"}, + {file = "yarl-1.8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7a66c506ec67eb3159eea5096acd05f5e788ceec7b96087d30c7d2865a243918"}, + {file = "yarl-1.8.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2b4fa2606adf392051d990c3b3877d768771adc3faf2e117b9de7eb977741229"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e21fb44e1eff06dd6ef971d4bdc611807d6bd3691223d9c01a18cec3677939e"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:93202666046d9edadfe9f2e7bf5e0782ea0d497b6d63da322e541665d65a044e"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fc77086ce244453e074e445104f0ecb27530d6fd3a46698e33f6c38951d5a0f1"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dd68a92cab699a233641f5929a40f02a4ede8c009068ca8aa1fe87b8c20ae3"}, + {file = "yarl-1.8.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b372aad2b5f81db66ee7ec085cbad72c4da660d994e8e590c997e9b01e44901"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e6f3515aafe0209dd17fb9bdd3b4e892963370b3de781f53e1746a521fb39fc0"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:dfef7350ee369197106805e193d420b75467b6cceac646ea5ed3049fcc950a05"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:728be34f70a190566d20aa13dc1f01dc44b6aa74580e10a3fb159691bc76909d"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:ff205b58dc2929191f68162633d5e10e8044398d7a45265f90a0f1d51f85f72c"}, + {file = "yarl-1.8.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:baf211dcad448a87a0d9047dc8282d7de59473ade7d7fdf22150b1d23859f946"}, + {file = "yarl-1.8.2-cp38-cp38-win32.whl", hash = "sha256:272b4f1599f1b621bf2aabe4e5b54f39a933971f4e7c9aa311d6d7dc06965165"}, + {file = "yarl-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:326dd1d3caf910cd26a26ccbfb84c03b608ba32499b5d6eeb09252c920bcbe4f"}, + {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:f8ca8ad414c85bbc50f49c0a106f951613dfa5f948ab69c10ce9b128d368baf8"}, + {file = "yarl-1.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:418857f837347e8aaef682679f41e36c24250097f9e2f315d39bae3a99a34cbf"}, + {file = "yarl-1.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ae0eec05ab49e91a78700761777f284c2df119376e391db42c38ab46fd662b77"}, + {file = 
"yarl-1.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:009a028127e0a1755c38b03244c0bea9d5565630db9c4cf9572496e947137a87"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3edac5d74bb3209c418805bda77f973117836e1de7c000e9755e572c1f7850d0"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:da65c3f263729e47351261351b8679c6429151ef9649bba08ef2528ff2c423b2"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0ef8fb25e52663a1c85d608f6dd72e19bd390e2ecaf29c17fb08f730226e3a08"}, + {file = "yarl-1.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcd7bb1e5c45274af9a1dd7494d3c52b2be5e6bd8d7e49c612705fd45420b12d"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:44ceac0450e648de86da8e42674f9b7077d763ea80c8ceb9d1c3e41f0f0a9951"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:97209cc91189b48e7cfe777237c04af8e7cc51eb369004e061809bcdf4e55220"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:48dd18adcf98ea9cd721a25313aef49d70d413a999d7d89df44f469edfb38a06"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e59399dda559688461762800d7fb34d9e8a6a7444fd76ec33220a926c8be1516"}, + {file = "yarl-1.8.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d617c241c8c3ad5c4e78a08429fa49e4b04bedfc507b34b4d8dceb83b4af3588"}, + {file = "yarl-1.8.2-cp39-cp39-win32.whl", hash = "sha256:cb6d48d80a41f68de41212f3dfd1a9d9898d7841c8f7ce6696cf2fd9cb57ef83"}, + {file = "yarl-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:6604711362f2dbf7160df21c416f81fac0de6dbcf0b5445a2ef25478ecc4c778"}, + {file = "yarl-1.8.2.tar.gz", hash = "sha256:49d43402c6e3013ad0978602bf6bf5328535c48d192304b91b97a3c6790b1562"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + 
+[[package]] +name = "zipp" +version = "3.15.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, + {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + [metadata] lock-version = "2.0" python-versions = "^3.11" -content-hash = "4e7ad671ba8bb755adbc8178fc8c7a2f63ae7e2c484da40a944459676ca857f0" +content-hash = "eb19a63136fcae2fc1b0fcf840bf3c66329c6c27d8112c3908bae260dd5cc93e" diff --git a/pyproject.toml b/pyproject.toml index c338dd2..071748f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,20 +4,26 @@ version = "v0.6.1" description = "A simple web UI for small-scale Headscale deployments." 
authors = ["Albert Copeland "] license = "AGPL" +packages = [ + { include = "*.py" } +] +readme = "README.md" +repository = "https://github.com/iFargle/headscale-webui" [tool.poetry.dependencies] python = "^3.11" requests = "^2.28.2" -Flask = "^2.2.2" +Flask = {extras = ["async"], version = "^2.2.3"} cryptography = "^39.0.0" -python-dateutil = "^2.8.2" -pytz = "^2022.7.1" -Flask-Executor = "^1.0.0" -PyYAML = "^6.0" pyuwsgi = "^2.0.21" gunicorn = "^20.1.0" flask-basicauth = "^0.2.0" flask-providers-oidc = "^1.2.1" +flask-pydantic = {git = "https://github.com/MarekPikula/flask-pydantic.git", rev = "dictable_models"} +headscale-api = {git = "https://github.com/MarekPikula/python-headscale-api.git"} +betterproto = {git = "https://github.com/MarekPikula/python-betterproto.git", rev = "classmethod_from_dict"} +apscheduler = "^3.10.1" +tzdata = "^2023.3" [tool.poetry.group.dev.dependencies] pylint = "^2.17.0" @@ -27,9 +33,18 @@ ruff = "^0.0.260" pre-commit = "^3.2.1" mypy = "^1.1.1" pydocstyle = "^6.3.0" +pylint-pydantic = "^0.1.8" +types-requests = "^2.28.11.17" +coverage = "^7.2.3" +gitpython = "^3.1.31" [build-system] requires = ["poetry-core>=1.0.0"] [tool.isort] profile = "black" + +[tool.pylint.main] +extension-pkg-whitelist = ["pydantic"] +load-plugins = ["pylint_pydantic"] +generated-members = "app.logger.debug,\napp.logger.info,\napp.logger.warning,\napp.logger.error,\napp.logger.critical,\napp.logger.exception,\napp.logger.setLevel" diff --git a/renderer.py b/renderer.py index c69985d..230cf62 100644 --- a/renderer.py +++ b/renderer.py @@ -1,345 +1,266 @@ -import logging -import os -from concurrent.futures import ALL_COMPLETED, wait -from datetime import datetime +"""Page rendering functions. -import pytz -import yaml -from dateutil import parser -from flask import Flask, Markup, render_template -from flask_executor import Executor +TODO: Move some parts to Jinja templates. 
+""" + + +import asyncio +import datetime + +from flask import current_app, render_template +from flask_oidc import OpenIDConnect # type: ignore +from headscale_api.schema.headscale import v1 as schema +from markupsafe import Markup -import headscale import helper - -LOG_LEVEL = os.environ["LOG_LEVEL"].replace('"', "").upper() -# Initiate the Flask application and logging: -app = Flask(__name__, static_url_path="/static") -match LOG_LEVEL: - case "DEBUG": - app.logger.setLevel(logging.DEBUG) - case "INFO": - app.logger.setLevel(logging.INFO) - case "WARNING": - app.logger.setLevel(logging.WARNING) - case "ERROR": - app.logger.setLevel(logging.ERROR) - case "CRITICAL": - app.logger.setLevel(logging.CRITICAL) -executor = Executor(app) +from config import Config +from headscale import HeadscaleApi -def render_overview(): - app.logger.info("Rendering the Overview page") - url = headscale.get_url() - api_key = headscale.get_api_key() +async def render_overview(headscale: HeadscaleApi): + """Render the overview page.""" + current_app.logger.info("Rendering the Overview page") - timezone = pytz.timezone(os.environ["TZ"] if os.environ["TZ"] else "UTC") - local_time = timezone.localize(datetime.now()) + local_time = datetime.datetime.now(headscale.app_config.timezone) - # Overview page will just read static information from the config file and display it - # Open the config.yaml and parse it. 
- config_file = "" - try: - config_file = open("/etc/headscale/config.yml", "r") - app.logger.info("Opening /etc/headscale/config.yml") - except: - config_file = open("/etc/headscale/config.yaml", "r") - app.logger.info("Opening /etc/headscale/config.yaml") - config_yaml = yaml.safe_load(config_file) + # Get and display overview of the following information: + # server's machines, users, preauth keys, API key expiration, server version - # Get and display the following information: - # Overview of the server's machines, users, preauth keys, API key expiration, server version - - # Get all machines: - machines = headscale.get_machines(url, api_key) - machines_count = len(machines["machines"]) + async with headscale.session: + machines, routes, users = await asyncio.gather( + headscale.list_machines(schema.ListMachinesRequest("")), + headscale.get_routes(schema.GetRoutesRequest()), + headscale.list_users(schema.ListUsersRequest()), + ) + user_preauth_keys: list[schema.ListPreAuthKeysResponse] = await asyncio.gather( + *[ + headscale.list_pre_auth_keys(schema.ListPreAuthKeysRequest(user.name)) + for user in users.users + ] + ) # Need to check if routes are attached to an active machine: # ISSUE: https://github.com/iFargle/headscale-webui/issues/36 # ISSUE: https://github.com/juanfont/headscale/issues/1228 # Get all routes: - routes = headscale.get_routes(url, api_key) - - total_routes = 0 - for route in routes["routes"]: - if int(route["machine"]["id"]) != 0: - total_routes += 1 - - enabled_routes = 0 - for route in routes["routes"]: - if ( - route["enabled"] - and route["advertised"] - and int(route["machine"]["id"]) != 0 - ): - enabled_routes += 1 + total_routes = sum(route.machine.id != 0 for route in routes.routes) + enabled_routes = sum( + route.enabled and route.advertised and route.machine.id != 0 + for route in routes.routes + ) # Get a count of all enabled exit routes exits_count = 0 exits_enabled_count = 0 - for route in routes["routes"]: - if 
route["advertised"] and int(route["machine"]["id"]) != 0: - if route["prefix"] == "0.0.0.0/0" or route["prefix"] == "::/0": + for route in routes.routes: + if route.advertised and route.machine.id != 0: + if route.prefix in ("0.0.0.0/0", "::/0"): exits_count += 1 - if route["enabled"]: + if route.enabled: exits_enabled_count += 1 # Get User and PreAuth Key counts - user_count = 0 - usable_keys_count = 0 - users = headscale.get_users(url, api_key) - for user in users["users"]: - user_count += 1 - preauth_keys = headscale.get_preauth_keys(url, api_key, user["name"]) - for key in preauth_keys["preAuthKeys"]: - expiration_parse = parser.parse(key["expiration"]) - key_expired = True if expiration_parse < local_time else False - if key["reusable"] and not key_expired: - usable_keys_count += 1 - if not key["reusable"] and not key["used"] and not key_expired: - usable_keys_count += 1 - - # General Content variables: - ( - ip_prefixes, - server_url, - disable_check_updates, - ephemeral_node_inactivity_timeout, - node_update_check_interval, - ) = ("N/A", "N/A", "N/A", "N/A", "N/A") - if "ip_prefixes" in config_yaml: - ip_prefixes = str(config_yaml["ip_prefixes"]) - if "server_url" in config_yaml: - server_url = str(config_yaml["server_url"]) - if "disable_check_updates" in config_yaml: - disable_check_updates = str(config_yaml["disable_check_updates"]) - if "ephemeral_node_inactivity_timeout" in config_yaml: - ephemeral_node_inactivity_timeout = str( - config_yaml["ephemeral_node_inactivity_timeout"] + usable_keys_count = sum( + sum( + (key.reusable or (not key.reusable and not key.used)) + and not key.expiration < local_time + for key in preauth_keys.pre_auth_keys ) - if "node_update_check_interval" in config_yaml: - node_update_check_interval = str(config_yaml["node_update_check_interval"]) - - # OIDC Content variables: - issuer, client_id, scope, use_expiry_from_token, expiry = ( - "N/A", - "N/A", - "N/A", - "N/A", - "N/A", + for preauth_keys in user_preauth_keys ) - if 
"oidc" in config_yaml: - if "issuer" in config_yaml["oidc"]: - issuer = str(config_yaml["oidc"]["issuer"]) - if "client_id" in config_yaml["oidc"]: - client_id = str(config_yaml["oidc"]["client_id"]) - if "scope" in config_yaml["oidc"]: - scope = str(config_yaml["oidc"]["scope"]) - if "use_expiry_from_token" in config_yaml["oidc"]: - use_expiry_from_token = str(config_yaml["oidc"]["use_expiry_from_token"]) - if "expiry" in config_yaml["oidc"]: - expiry = str(config_yaml["oidc"]["expiry"]) - - # Embedded DERP server information. - enabled, region_id, region_code, region_name, stun_listen_addr = ( - "N/A", - "N/A", - "N/A", - "N/A", - "N/A", - ) - if "derp" in config_yaml: - if "server" in config_yaml["derp"] and config_yaml["derp"]["server"]["enabled"]: - if "enabled" in config_yaml["derp"]["server"]: - enabled = str(config_yaml["derp"]["server"]["enabled"]) - if "region_id" in config_yaml["derp"]["server"]: - region_id = str(config_yaml["derp"]["server"]["region_id"]) - if "region_code" in config_yaml["derp"]["server"]: - region_code = str(config_yaml["derp"]["server"]["region_code"]) - if "region_name" in config_yaml["derp"]["server"]: - region_name = str(config_yaml["derp"]["server"]["region_name"]) - if "stun_listen_addr" in config_yaml["derp"]["server"]: - stun_listen_addr = str( - config_yaml["derp"]["server"]["stun_listen_addr"] - ) - - nameservers, magic_dns, domains, base_domain = "N/A", "N/A", "N/A", "N/A" - if "dns_config" in config_yaml: - if "nameservers" in config_yaml["dns_config"]: - nameservers = str(config_yaml["dns_config"]["nameservers"]) - if "magic_dns" in config_yaml["dns_config"]: - magic_dns = str(config_yaml["dns_config"]["magic_dns"]) - if "domains" in config_yaml["dns_config"]: - domains = str(config_yaml["dns_config"]["domains"]) - if "base_domain" in config_yaml["dns_config"]: - base_domain = str(config_yaml["dns_config"]["base_domain"]) # Start putting the content together - overview_content = ( - """ -
-
-
-
    -
  • Server Statistics

  • -
  • Machines Added
    """ - + str(machines_count) - + """
  • -
  • Users Added
    """ - + str(user_count) - + """
  • -
  • Usable Preauth Keys
    """ - + str(usable_keys_count) - + """
  • -
  • Enabled/Total Routes
    """ - + str(enabled_routes) - + """/""" - + str(total_routes) - + """
  • -
  • Enabled/Total Exits
    """ - + str(exits_enabled_count) - + """/""" - + str(exits_count) - + """
  • -
+ overview_content = f""" +
+
+
+
    +
  • Server Statistics

  • +
  • Machines Added +
    + {len(machines.machines)}
  • +
  • Users Added +
    + {len(users.users)}
  • +
  • Usable Preauth Keys +
    + {usable_keys_count}
  • +
  • Enabled/Total Routes +
    + {enabled_routes}/{total_routes}
  • +
  • Enabled/Total Exits +
    + {exits_enabled_count}/{exits_count}
  • +
+
+
-
-
- """ - ) - general_content = ( """ -
-
-
-
    -
  • General

  • -
  • IP Prefixes
    """ - + ip_prefixes - + """
  • -
  • Server URL
    """ - + server_url - + """
  • -
  • Updates Disabled
    """ - + disable_check_updates - + """
  • -
  • Ephemeral Node Inactivity Timeout
    """ - + ephemeral_node_inactivity_timeout - + """
  • -
  • Node Update Check Interval
    """ - + node_update_check_interval - + """
  • -
+ + # Overview page will just read static information from the config file and display + # it Open the config.yaml and parse it. + config_yaml = headscale.hs_config + + if config_yaml is None: + return Markup( + f"""
{overview_content} +
+
+
+
    +
  • General

  • +
  • + Headscale configuration is invalid or unavailable. + Please check logs.
  • +
+
+
+
+ """ + ) + + general_content = f""" +
+
+
+
    +
  • General

  • +
  • IP Prefixes +
    + {config_yaml.ip_prefixes or 'N/A'}
  • +
  • Server URL +
    + {config_yaml.server_url}
  • +
  • Updates Disabled +
    + {config_yaml.disable_check_updates or 'N/A'} +
  • +
  • Ephemeral Node Inactivity Timeout +
    + {config_yaml.ephemeral_node_inactivity_timeout or 'N/A'} +
  • +
  • Node Update Check Interval +
    + {config_yaml.node_update_check_interval or 'N/A'} +
  • +
+
+
-
-
- """ - ) + """ + + # OIDC Content: + oidc = config_yaml.oidc oidc_content = ( - """ -
-
-
-
    -
  • Headscale OIDC

  • -
  • Issuer
    """ - + issuer - + """
  • -
  • Client ID
    """ - + client_id - + """
  • -
  • Scope
    """ - + scope - + """
  • -
  • Use OIDC Token Expiry
    """ - + use_expiry_from_token - + """
  • -
  • Expiry
    """ - + expiry - + """
  • -
-
-
-
- """ - ) - derp_content = ( - """ -
-
-
-
    -
  • Embedded DERP

  • -
  • Enabled
    """ - + enabled - + """
  • -
  • Region ID
    """ - + region_id - + """
  • -
  • Region Code
    """ - + region_code - + """
  • -
  • Region Name
    """ - + region_name - + """
  • -
  • STUN Address
    """ - + stun_listen_addr - + """
  • -
-
-
-
- """ - ) - dns_content = ( - """ -
-
-
-
    -
  • DNS

  • -
  • DNS Nameservers
    """ - + nameservers - + """
  • -
  • MagicDNS
    """ - + magic_dns - + """
  • -
  • Search Domains
    """ - + domains - + """
  • -
  • Base Domain
    """ - + base_domain - + """
  • -
-
-
-
- """ + ( + f""" +
+
+
+
    +
  • Headscale OIDC

  • +
  • Issuer +
    + {oidc.issuer or 'N/A'}
  • +
  • Client ID +
    + {oidc.client_id or 'N/A'}
  • +
  • Scope +
    + {oidc.scope or 'N/A'}
  • +
  • Use OIDC Token Expiry +
    + {oidc.use_expiry_from_token or 'N/A'}
  • +
  • Expiry +
    + {oidc.expiry or 'N/A'}
  • +
+
+
+
+ """ + ) + if oidc is not None + else "" ) - # Remove content that isn't needed: - # Remove OIDC if it isn't available: - if "oidc" not in config_yaml: - oidc_content = "" - # Remove DERP if it isn't available or isn't enabled - if "derp" not in config_yaml: - derp_content = "" - if "derp" in config_yaml: - if "server" in config_yaml["derp"]: - if str(config_yaml["derp"]["server"]["enabled"]) == "False": - derp_content = "" + # Embedded DERP server information. + derp = config_yaml.derp + derp_content = ( + ( + f""" +
+
+
+
    +
  • Embedded DERP

  • +
  • Enabled +
    + {derp.server.enabled}
  • +
  • Region ID +
    + {derp.server.region_id or 'N/A'}
  • +
  • Region Code +
    + {derp.server.region_code or 'N/A'}
  • +
  • Region Name +
    + {derp.server.region_name or 'N/A'}
  • +
  • STUN Address +
    + {derp.server.stun_listen_addr or 'N/A'}
  • +
+
+
+
+ """ + ) + if derp is not None and derp.server is not None and derp.server.enabled + else "" + ) + + dns_config = config_yaml.dns_config + dns_content = ( + ( + f""" +
+
+
+
    +
  • DNS

  • +
  • DNS Nameservers +
    + {dns_config.nameservers or 'N/A'}
  • +
  • MagicDNS +
    + {dns_config.magic_dns or 'N/A'}
  • +
  • Search Domains +
    + {dns_config.domains or 'N/A'}
  • +
  • Base Domain +
    + {dns_config.base_domain or 'N/A'}
  • +
+
+
+
+ """ + ) + if dns_config is not None + else "" + ) # TODO: # Whether there are custom DERP servers - # If there are custom DERP servers, get the file location from the config file. Assume mapping is the same. + # If there are custom DERP servers, get the file location from the config + # file. Assume mapping is the same. # Whether the built-in DERP server is enabled # The IP prefixes # The DNS config - if config_yaml["derp"]["paths"]: - pass + # if derp is not None and derp.paths is not None: + # pass # # open the path: # derp_file = # config_file = open("/etc/headscale/config.yaml", "r") @@ -350,44 +271,44 @@ def render_overview(): # The log level # What kind of Database is being used to drive headscale - content = ( + return Markup( "
" + overview_content + general_content + derp_content + oidc_content + dns_content - + "" ) - return Markup(content) -def thread_machine_content( - machine, machine_content, idx, all_routes, failover_pair_prefixes -): +async def thread_machine_content( + headscale: HeadscaleApi, + machine: schema.Machine, + idx: int, + all_routes: schema.GetRoutesResponse, +) -> str: + """Render a single machine.""" # machine = passed in machine information # content = place to write the content - # app.logger.debug("Machine Information") - # app.logger.debug(str(machine)) - app.logger.debug("Machine Information =================") - app.logger.debug( - "Name: %s, ID: %s, User: %s, givenName: %s, ", - str(machine["name"]), - str(machine["id"]), - str(machine["user"]["name"]), - str(machine["givenName"]), + failover_pair_prefixes: list[str] = [] + current_app.logger.debug("Machine Information =================") + current_app.logger.debug( + "Name: %s, ID: %i, User: %s, givenName: %s", + machine.name, + machine.id, + machine.user.name, + machine.given_name, ) - url = headscale.get_url() - api_key = headscale.get_api_key() - # Set the current timezone and local time - timezone = pytz.timezone(os.environ["TZ"] if os.environ["TZ"] else "UTC") - local_time = timezone.localize(datetime.now()) + timezone = headscale.app_config.timezone + local_time = datetime.datetime.now(timezone) # Get the machines routes - pulled_routes = headscale.get_machine_routes(url, api_key, machine["id"]) + pulled_routes = await headscale.get_machine_routes( + schema.GetMachineRoutesRequest(machine.id) + ) routes = "" # Test if the machine is an exit node: @@ -397,520 +318,385 @@ def thread_machine_content( ha_enabled = False # If the length of "routes" is NULL/0, there are no routes, enabled or disabled: - if len(pulled_routes["routes"]) > 0: - advertised_routes = False + if len(pulled_routes.routes) > 0: + # First, check if there are any routes that are both enabled and advertised If + # that is true, we will 
output the collection-item for routes. Otherwise, it + # will not be displayed. + advertised_routes = any(route.advertised for route in pulled_routes.routes) - # First, check if there are any routes that are both enabled and advertised - # If that is true, we will output the collection-item for routes. Otherwise, it will not be displayed. - for route in pulled_routes["routes"]: - if route["advertised"]: - advertised_routes = True if advertised_routes: routes = """
  • directions Routes

    - """ - # app.logger.debug("Pulled Routes Dump: "+str(pulled_routes)) - # app.logger.debug("All Routes Dump: "+str(all_routes)) + """ + # current_app.logger.debug("Pulled Routes Dump: "+str(pulled_routes)) + # current_app.logger.debug("All Routes Dump: "+str(all_routes)) # Find all exits and put their ID's into the exit_routes array - exit_routes = [] + exit_routes: list[int] = [] exit_enabled_color = "red" exit_tooltip = "enable" exit_route_enabled = False - for route in pulled_routes["routes"]: - if route["prefix"] == "0.0.0.0/0" or route["prefix"] == "::/0": - exit_routes.append(route["id"]) + for route in pulled_routes.routes: + if route.prefix in ("0.0.0.0/0", "::/0"): + exit_routes.append(route.id) exit_route_found = True # Test if it is enabled: - if route["enabled"]: + if route.enabled: exit_enabled_color = "green" exit_tooltip = "disable" exit_route_enabled = True - app.logger.debug("Found exit route ID's: " + str(exit_routes)) - app.logger.debug( - "Exit Route Information: ID: %s | Enabled: %s | exit_route_enabled: %s / Found: %s", - str(route["id"]), - str(route["enabled"]), - str(exit_route_enabled), - str(exit_route_found), + current_app.logger.debug("Found exit route ID's: %s", exit_routes) + current_app.logger.debug( + "Exit Route Information: ID: %i | Enabled: %r | " + "exit_route_enabled: %r / Found: %r", + route.id, + route.enabled, + exit_route_enabled, + exit_route_found, ) # Print the button for the Exit routes: if exit_route_found: - routes = ( - routes - + """

    - Exit Route -

    - """ + routes += ( + f"

    " + "Exit Route

    " ) # Check if the route has another enabled identical route. # Check all routes from the current machine... - for route in pulled_routes["routes"]: + for route in pulled_routes.routes: # ... against all routes from all machines .... - for route_info in all_routes["routes"]: - app.logger.debug( - "Comparing routes %s and %s", - str(route["prefix"]), - str(route_info["prefix"]), + for route_info in all_routes.routes: + current_app.logger.debug( + "Comparing routes %s and %s", route.prefix, route_info.prefix ) # ... If the route prefixes match and are not exit nodes ... - if str(route_info["prefix"]) == str(route["prefix"]) and ( - route["prefix"] != "0.0.0.0/0" and route["prefix"] != "::/0" + if route_info.prefix == route.prefix and ( + route.prefix not in ("0.0.0.0/0", "::/0") ): # Check if the route ID's match. If they don't ... - app.logger.debug( - "Found a match: %s and %s", - str(route["prefix"]), - str(route_info["prefix"]), + current_app.logger.debug( + "Found a match: %s and %s", route.prefix, route_info.prefix ) - if route_info["id"] != route["id"]: - app.logger.debug( - "Route ID's don't match. They're on different nodes." + if route_info.id != route.id: + current_app.logger.debug( + "Route ID's don't match. They're on different nodes." ) # ... Check if the routes prefix is already in the array... - if route["prefix"] not in failover_pair_prefixes: + if route.prefix not in failover_pair_prefixes: # IF it isn't, add it. - app.logger.info( - "New HA pair found: %s", str(route["prefix"]) + current_app.logger.info( + "New HA pair found: %s", route.prefix ) - failover_pair_prefixes.append(str(route["prefix"])) - if route["enabled"] and route_info["enabled"]: + failover_pair_prefixes.append(route.prefix) + if route.enabled and route_info.enabled: # If it is already in the array. . . # Show as HA only if both routes are enabled: - app.logger.debug( - "Both routes are enabled. 
Setting as HA [%s] (%s) ", - str(machine["name"]), - str(route["prefix"]), + current_app.logger.debug( + "Both routes are enabled. Setting as HA [%s] (%s) ", + machine.name, + route.prefix, ) ha_enabled = True - # If the route is an exit node and already counted as a failover route, it IS a failover route, so display it. + # If the route is an exit node and already counted as a failover route, + # it IS a failover route, so display it. if ( - route["prefix"] != "0.0.0.0/0" - and route["prefix"] != "::/0" - and route["prefix"] in failover_pair_prefixes + route.prefix not in ("0.0.0.0/0", "::/0") + and route.prefix in failover_pair_prefixes ): route_enabled = "red" route_tooltip = "enable" - color_index = failover_pair_prefixes.index(str(route["prefix"])) + color_index = failover_pair_prefixes.index(route.prefix) route_enabled_color = helper.get_color(color_index, "failover") - if route["enabled"]: - color_index = failover_pair_prefixes.index(str(route["prefix"])) + if route.enabled: + color_index = failover_pair_prefixes.index(route.prefix) route_enabled = helper.get_color(color_index, "failover") route_tooltip = "disable" - routes = ( - routes - + """

    - """ - + route["prefix"] - + """ -

    - """ + routes += ( + f"

    " + f"{route.prefix}

    " ) # Get the remaining routes: - for route in pulled_routes["routes"]: + for route in pulled_routes.routes: # Get the remaining routes - No exits or failover pairs if ( - route["prefix"] != "0.0.0.0/0" - and route["prefix"] != "::/0" - and route["prefix"] not in failover_pair_prefixes + route.prefix not in ("0.0.0.0/0", "::/0") + and route.prefix not in failover_pair_prefixes ): - app.logger.debug( - "Route: [" - + str(route["machine"]["name"]) - + "] id: " - + str(route["id"]) - + " / prefix: " - + str(route["prefix"]) - + " enabled?: " - + str(route["enabled"]) + current_app.logger.debug( + "Route: [%s] id: %i / prefix: %s enabled?: %r", + route.machine.name, + route.id, + route.prefix, + route.enabled, ) route_enabled = "red" route_tooltip = "enable" - if route["enabled"]: + if route.enabled: route_enabled = "green" route_tooltip = "disable" - routes = ( - routes - + """

    - """ - + route["prefix"] - + """ -

    - """ + routes += ( + f"

    {route.prefix}

    " ) - routes = routes + "

  • " + routes += "

    " # Get machine tags - tag_array = "" - for tag in machine["forcedTags"]: - tag_array = tag_array + "{tag: '" + tag[4:] + "'}, " - tags = ( - """ + tag_array = ", ".join(f"{{tag: '{tag[4:]}'}}" for tag in machine.forced_tags) + tags = f"""
  • - label + label Tags -

    +

  • """ - ) # Get the machine IP's - machine_ips = "
      " - for ip_address in machine["ipAddresses"]: - machine_ips = machine_ips + "
    • " + ip_address + "
    • " - machine_ips = machine_ips + "
    " + machine_ips = ( + "
      " + + "".join(f"
    • {ip_address}
    • " for ip_address in machine.ip_addresses) + + "
    " + ) # Format the dates for easy readability - last_seen_parse = parser.parse(machine["lastSeen"]) - last_seen_local = last_seen_parse.astimezone(timezone) + last_seen_local = machine.last_seen.astimezone(timezone) last_seen_delta = local_time - last_seen_local last_seen_print = helper.pretty_print_duration(last_seen_delta) last_seen_time = ( str(last_seen_local.strftime("%A %m/%d/%Y, %H:%M:%S")) - + " " - + str(timezone) - + " (" - + str(last_seen_print) - + ")" + + f" {timezone} ({last_seen_print})" ) - last_update_parse = ( - local_time - if machine["lastSuccessfulUpdate"] is None - else parser.parse(machine["lastSuccessfulUpdate"]) - ) - last_update_local = last_update_parse.astimezone(timezone) - last_update_delta = local_time - last_update_local - last_update_print = helper.pretty_print_duration(last_update_delta) - last_update_time = ( - str(last_update_local.strftime("%A %m/%d/%Y, %H:%M:%S")) - + " " - + str(timezone) - + " (" - + str(last_update_print) - + ")" - ) + if machine.last_successful_update is not None: + last_update_local = machine.last_successful_update.astimezone(timezone) + last_update_delta = local_time - last_update_local + last_update_print = helper.pretty_print_duration(last_update_delta) + last_update_time = ( + str(last_update_local.strftime("%A %m/%d/%Y, %H:%M:%S")) + + f" {timezone} ({last_update_print})" + ) + else: + last_update_print = None + last_update_time = None - created_parse = parser.parse(machine["createdAt"]) - created_local = created_parse.astimezone(timezone) + created_local = machine.created_at.astimezone(timezone) created_delta = local_time - created_local created_print = helper.pretty_print_duration(created_delta) created_time = ( str(created_local.strftime("%A %m/%d/%Y, %H:%M:%S")) - + " " - + str(timezone) - + " (" - + str(created_print) - + ")" + + f" {timezone} ({created_print})" ) # If there is no expiration date, we don't need to do any calculations: - if machine["expiry"] != "0001-01-01T00:00:00Z": - 
expiry_parse = parser.parse(machine["expiry"]) - expiry_local = expiry_parse.astimezone(timezone) + if machine.expiry != datetime.datetime(1, 1, 1, 0, 0, tzinfo=datetime.timezone.utc): + expiry_local = machine.expiry.astimezone(timezone) expiry_delta = expiry_local - local_time expiry_print = helper.pretty_print_duration(expiry_delta, "expiry") if str(expiry_local.strftime("%Y")) in ("0001", "9999", "0000"): expiry_time = "No expiration date." elif int(expiry_local.strftime("%Y")) > int(expiry_local.strftime("%Y")) + 2: expiry_time = ( - str(expiry_local.strftime("%m/%Y")) - + " " - + str(timezone) - + " (" - + str(expiry_print) - + ")" + str(expiry_local.strftime("%m/%Y")) + f" {timezone} ({expiry_print})" ) else: expiry_time = ( str(expiry_local.strftime("%A %m/%d/%Y, %H:%M:%S")) - + " " - + str(timezone) - + " (" - + str(expiry_print) - + ")" + + f" {timezone} ({expiry_print})" ) - expiring_soon = ( - True - if int(expiry_delta.days) < 14 and int(expiry_delta.days) > 0 - else False - ) - app.logger.debug( - "Machine: " - + machine["name"] - + " expires: " - + str(expiry_local.strftime("%Y")) - + " / " - + str(expiry_delta.days) + expiring_soon = int(expiry_delta.days) < 14 and int(expiry_delta.days) > 0 + current_app.logger.debug( + "Machine: %s expires: %s / %i", + machine.name, + expiry_local.strftime("%Y"), + expiry_delta.days, ) else: expiry_time = "No expiration date." 
expiring_soon = False - app.logger.debug("Machine: " + machine["name"] + " has no expiration date") + current_app.logger.debug("Machine: %s has no expiration date", machine.name) # Get the first 10 characters of the PreAuth Key: - if machine["preAuthKey"]: - preauth_key = str(machine["preAuthKey"]["key"])[0:10] + if machine.pre_auth_key is not None: + preauth_key = machine.pre_auth_key.key[0:10] else: preauth_key = "None" # Set the status and user badge color: text_color = helper.text_color_duration(last_seen_delta) - user_color = helper.get_color(int(machine["user"]["id"])) + user_color = helper.get_color(int(machine.user.id)) # Generate the various badges: status_badge = ( - "fiber_manual_record" + f"" + "fiber_manual_record" ) user_badge = ( - "" - + machine["user"]["name"] - + "" + f"{machine.user.name}" ) exit_node_badge = ( "" if not exit_route_enabled - else "Exit" + else ( + "Exit" + ) ) ha_route_badge = ( "" if not ha_enabled - else "HA" + else ( + "HA" + ) ) expiration_badge = ( "" if not expiring_soon - else "Expiring!" - ) - - machine_content[idx] = str( - render_template( - "machines_card.html", - given_name=machine["givenName"], - machine_id=machine["id"], - hostname=machine["name"], - ns_name=machine["user"]["name"], - ns_id=machine["user"]["id"], - ns_created=machine["user"]["createdAt"], - last_seen=str(last_seen_print), - last_update=str(last_update_print), - machine_ips=Markup(machine_ips), - advertised_routes=Markup(routes), - exit_node_badge=Markup(exit_node_badge), - ha_route_badge=Markup(ha_route_badge), - status_badge=Markup(status_badge), - user_badge=Markup(user_badge), - last_update_time=str(last_update_time), - last_seen_time=str(last_seen_time), - created_time=str(created_time), - expiry_time=str(expiry_time), - preauth_key=str(preauth_key), - expiration_badge=Markup(expiration_badge), - machine_tags=Markup(tags), - taglist=machine["forcedTags"], + else ( + "" + "Expiring!" 
) ) - app.logger.info( - "Finished thread for machine " + machine["givenName"] + " index " + str(idx) + + current_app.logger.info( + "Finished thread for machine %s index %i", machine.given_name, idx + ) + return render_template( + "machines_card.html", + given_name=machine.given_name, + machine_id=machine.id, + hostname=machine.name, + ns_name=machine.user.name, + ns_id=machine.user.id, + ns_created=machine.user.created_at, + last_seen=str(last_seen_print), + last_update=str(last_update_print), + machine_ips=Markup(machine_ips), + advertised_routes=Markup(routes), + exit_node_badge=Markup(exit_node_badge), + ha_route_badge=Markup(ha_route_badge), + status_badge=Markup(status_badge), + user_badge=Markup(user_badge), + last_update_time=str(last_update_time), + last_seen_time=str(last_seen_time), + created_time=str(created_time), + expiry_time=str(expiry_time), + preauth_key=str(preauth_key), + expiration_badge=Markup(expiration_badge), + machine_tags=Markup(tags), + taglist=machine.forced_tags, ) -def render_machines_cards(): - app.logger.info("Rendering machine cards") - url = headscale.get_url() - api_key = headscale.get_api_key() - machines_list = headscale.get_machines(url, api_key) +async def render_machines_cards(headscale: HeadscaleApi): + """Render machine cards.""" + current_app.logger.info("Rendering machine cards") - ######################################### - # Thread this entire thing. 
- num_threads = len(machines_list["machines"]) - iterable = [] - machine_content = {} - failover_pair_prefixes = [] - for i in range(0, num_threads): - app.logger.debug("Appending iterable: " + str(i)) - iterable.append(i) - # Flask-Executor Method: - - # Get all routes - all_routes = headscale.get_routes(url, api_key) - # app.logger.debug("All found routes") - # app.logger.debug(str(all_routes)) - - if LOG_LEVEL == "DEBUG": - # DEBUG: Do in a forloop: - for idx in iterable: - thread_machine_content( - machines_list["machines"][idx], - machine_content, - idx, - all_routes, - failover_pair_prefixes, - ) - else: - app.logger.info("Starting futures") - futures = [ - executor.submit( - thread_machine_content, - machines_list["machines"][idx], - machine_content, - idx, - all_routes, - failover_pair_prefixes, - ) - for idx in iterable - ] - # Wait for the executor to finish all jobs: - wait(futures, return_when=ALL_COMPLETED) - app.logger.info("Finished futures") - - # Sort the content by machine_id: - sorted_machines = { - key: val for key, val in sorted(machine_content.items(), key=lambda ele: ele[0]) - } - - content = "" - - return Markup(content) + async with headscale.session: + # Execute concurrent machine info requests and sort them by machine_id. 
+ routes = await headscale.get_routes(schema.GetRoutesRequest()) + content = await asyncio.gather( + *[ + thread_machine_content(headscale, machine, idx, routes) + for idx, machine in enumerate( + ( + await headscale.list_machines(schema.ListMachinesRequest("")) + ).machines + ) + ] + ) + return Markup("") -def render_users_cards(): - app.logger.info("Rendering Users cards") - url = headscale.get_url() - api_key = headscale.get_api_key() - user_list = headscale.get_users(url, api_key) +async def render_users_cards(headscale: HeadscaleApi): + """Render users cards.""" + current_app.logger.info("Rendering Users cards") - content = "" - return Markup(content) + return Markup("") -def build_preauth_key_table(user_name): - app.logger.info("Building the PreAuth key table for User: %s", str(user_name)) - url = headscale.get_url() - api_key = headscale.get_api_key() +async def build_user_card(headscale: HeadscaleApi, user: schema.User): + """Build a user card.""" + # Get all preAuth Keys in the user, only display if one exists: + preauth_keys_collection = await build_preauth_key_table( + headscale, schema.ListPreAuthKeysRequest(user.name) + ) - preauth_keys = headscale.get_preauth_keys(url, api_key, user_name) - preauth_keys_collection = ( - """
  • + # Set the user badge color: + user_color = helper.get_color(int(user.id), "text") + + # Generate the various badges: + status_badge = ( + f"" + "fiber_manual_record" + ) + + return render_template( + "users_card.html", + status_badge=Markup(status_badge), + user_name=user.name, + user_id=user.id, + preauth_keys_collection=Markup(preauth_keys_collection), + ) + + +async def build_preauth_key_table( + headscale: HeadscaleApi, request: schema.ListPreAuthKeysRequest +): + """Build PreAuth key table for a user.""" + current_app.logger.info( + "Building the PreAuth key table for User: %s", request.user + ) + + preauth_keys = await headscale.list_pre_auth_keys(request) + preauth_keys_collection = f""" +
  • Add PreAuth Key vpn_key PreAuth Keys - """ - ) - if len(preauth_keys["preAuthKeys"]) == 0: + """ + if len(preauth_keys.pre_auth_keys) == 0: preauth_keys_collection += "

    No keys defined for this user

    " - if len(preauth_keys["preAuthKeys"]) > 0: - preauth_keys_collection += ( + else: + preauth_keys_collection += f""" + + + + + + + + + + """ -
    IDKey Prefix
    Reusable
    Used
    Ephemeral
    Usable
    Actions
    - - - - - - - - - - - - """ - ) - for key in preauth_keys["preAuthKeys"]: + for key in preauth_keys.pre_auth_keys: # Get the key expiration date and compare it to now to check if it's expired: # Set the current timezone and local time - timezone = pytz.timezone(os.environ["TZ"] if os.environ["TZ"] else "UTC") - local_time = timezone.localize(datetime.now()) - expiration_parse = parser.parse(key["expiration"]) - key_expired = True if expiration_parse < local_time else False + timezone = headscale.app_config.timezone + local_time = datetime.datetime.now(timezone) + key_expired = key.expiration < local_time expiration_time = ( - str(expiration_parse.strftime("%A %m/%d/%Y, %H:%M:%S")) - + " " - + str(timezone) + key.expiration.strftime("%A %m/%d/%Y, %H:%M:%S") + f" {timezone}" ) - key_usable = False - if key["reusable"] and not key_expired: - key_usable = True - if not key["reusable"] and not key["used"] and not key_expired: - key_usable = True + key_usable = (key.reusable and not key_expired) or ( + not key.reusable and not key.used and not key_expired + ) # Class for the javascript function to look for to toggle the hide function hide_expired = "expired-row" if not key_usable else "" btn_reusable = ( - "fiber_manual_record" - if key["reusable"] + "" + "::" + "fiber_manual_record" + if key.reusable else "" ) btn_ephemeral = ( - "fiber_manual_record" - if key["ephemeral"] + "" + "fiber_manual_record" + if key.ephemeral else "" ) btn_used = ( - "fiber_manual_record" - if key["used"] + "" + "fiber_manual_record" + if key.used else "" ) btn_usable = ( - "fiber_manual_record" + "" + "fiber_manual_record" if key_usable else "" ) # Other buttons: btn_delete = ( - "Expire" + "' + "Expire" if key_usable else "" ) - tooltip_data = "Expiration: " + expiration_time + tooltip_data = f"Expiration: {expiration_time}" # TR ID will look like "1-albert-tr" - preauth_keys_collection = ( - preauth_keys_collection - + """ - - - - - - - - + preauth_keys_collection += f""" + + + + + + + 
+ - """ - ) + """ - preauth_keys_collection = ( - preauth_keys_collection - + """
    IDKey Prefix
    Reusable
    Used
    Ephemeral
    Usable
    Actions
    """ - + str(key["id"]) - + """""" - + str(key["key"])[0:10] - + """
    """ - + btn_reusable - + """
    """ - + btn_used - + """
    """ - + btn_ephemeral - + """
    """ - + btn_usable - + """
    """ - + btn_delete - + """
    {key.id}{key.key[0:10]}
    {btn_reusable}
    {btn_used}
    {btn_ephemeral}
    {btn_usable}
    {btn_delete}
    -
  • - """ - ) - return preauth_keys_collection + return preauth_keys_collection + "" -def oidc_nav_dropdown(user_name, email_address, name): - app.logger.info("OIDC is enabled. Building the OIDC nav dropdown") - html_payload = ( - """ +def oidc_nav_dropdown(user_name: str, email_address: str, name: str) -> Markup: + """Render desktop navigation for OIDC.""" + current_app.logger.debug("OIDC is enabled. Building the OIDC nav dropdown") + html_payload = f"""
  • - """ - + name - + """ account_circle + {name} account_circle
  • - """ + """ + return Markup(html_payload) + + +def oidc_nav_mobile(): + """Render mobile navigation for OIDC.""" + return Markup( + '

  • ' + "exit_to_appLogout
  • " ) - return Markup(html_payload) -def oidc_nav_mobile(user_name, email_address, name): - html_payload = """ -

  • exit_to_appLogout
  • +def render_defaults( + config: Config, oidc_handler: OpenIDConnect | None +) -> dict[str, Markup | str]: + """Render the default elements. + + TODO: Think about caching the results. """ - return Markup(html_payload) + colors = { + "color_nav": config.color_nav, + "color_btn": config.color_btn, + } + + if oidc_handler is None: + return colors + + # If OIDC is enabled, display the buttons: + email_address: str = oidc_handler.user_getfield("email") # type: ignore + assert isinstance(email_address, str) + user_name: str = oidc_handler.user_getfield("preferred_username") # type: ignore + assert isinstance(user_name, str) + name: str = oidc_handler.user_getfield("name") # type: ignore + assert isinstance(name, str) + + return { + "oidc_nav_dropdown": oidc_nav_dropdown(user_name, email_address, name), + "oidc_nav_mobile": oidc_nav_mobile(), + **colors, + } def render_search(): - html_payload = """ -
  • - search -
  • - """ - return Markup(html_payload) + """Render search bar.""" + return Markup( + """ +
  • + search +
  • + """ + ) -def render_routes(): - app.logger.info("Rendering Routes page") - url = headscale.get_url() - api_key = headscale.get_api_key() - all_routes = headscale.get_routes(url, api_key) +async def render_routes(headscale: HeadscaleApi): + """Render routes page.""" + current_app.logger.info("Rendering Routes page") + all_routes = await headscale.get_routes(schema.GetRoutesRequest()) # If there are no routes, just exit: - if len(all_routes) == 0: + if len(all_routes.routes) == 0: return Markup("


    There are no routes to display!
    ") # Get a list of all Route ID's to iterate through: - all_routes_id_list = [] - for route in all_routes["routes"]: - all_routes_id_list.append(route["id"]) - if route["machine"]["name"]: - app.logger.info( - "Found route %s / machine: %s", - str(route["id"]), - route["machine"]["name"], + all_routes_id_list: list[int] = [] + for route in all_routes.routes: + all_routes_id_list.append(route.id) + if route.machine.name: + current_app.logger.info( + "Found route %i / machine: %s", route.id, route.machine.name ) else: - app.logger.info("Route id %s has no machine associated.", str(route["id"])) + current_app.logger.info("Route id %i has no machine associated.", route.id) route_content = "" failover_content = "" @@ -1151,41 +922,40 @@ def render_routes(): ############################################################################################## # Step 1: Get all non-exit and non-failover routes: - route_content = markup_pre + route_title - route_content += """

    - - - - - - - - - - """ - for route in all_routes["routes"]: + route_content = ( + markup_pre + + route_title + + """ +

    ID Machine Route Enabled
    + + + + + + + + + + """ + ) + for route in all_routes.routes: # Get relevant info: - route_id = route["id"] - machine = route["machine"]["givenName"] - prefix = route["prefix"] - is_enabled = route["enabled"] - is_primary = route["isPrimary"] + machine = route.machine.given_name + prefix = route.prefix + is_enabled = route.enabled + is_primary = route.is_primary is_failover = False is_exit = False enabled = ( - "fiber_manual_record" + f"fiber_manual_record" ) disabled = ( - "fiber_manual_record" + f"fiber_manual_record" ) # Set the displays: @@ -1194,60 +964,51 @@ def render_routes(): if is_enabled: enabled_display = enabled # Check if a prefix is an Exit route: - if prefix == "0.0.0.0/0" or prefix == "::/0": + if prefix in ("0.0.0.0/0", "::/0"): is_exit = True # Check if a prefix is part of a failover pair: - for route_check in all_routes["routes"]: - if not is_exit: - if route["prefix"] == route_check["prefix"]: - if route["id"] != route_check["id"]: - is_failover = True + for route_check in all_routes.routes: + if ( + not is_exit + and route.prefix == route_check.prefix + and route.id != route_check.id + ): + is_failover = True if not is_exit and not is_failover and machine != "": # Build a simple table for all non-exit routes: - route_content += ( - """ - - - - - - - """ - ) + route_content += f""" + + + + + """ route_content += "
    ID Machine Route Enabled
    """ - + str(route_id) - + """""" - + str(machine) - + """""" - + str(prefix) - + """
    """ - + str(enabled_display) - + """
    {route.id}{machine}{prefix}
    {enabled_display}

    " + markup_post ############################################################################################## # Step 2: Get all failover routes only. Add a separate table per failover prefix - failover_route_prefix = [] - failover_available = False - for route in all_routes["routes"]: - # Get a list of all prefixes for all routes... - for route_check in all_routes["routes"]: - # ... that aren't exit routes... - if route["prefix"] != "0.0.0.0/0" and route["prefix"] != "::/0": - # if the curren route matches any prefix of any other route... - if route["prefix"] == route_check["prefix"]: - # and the route ID's are different ... - if route["id"] != route_check["id"]: - # ... and the prefix is not already in the list... - if route["prefix"] not in failover_route_prefix: - # append the prefix to the failover_route_prefix list - failover_route_prefix.append(route["prefix"]) - failover_available = True + # Get a set of all prefixes for all routes: + # - that aren't exit routes + # - the current route matches any prefix of any other route + # - the route ID's are different + failover_route_prefix = set( + route.prefix + for route_check in all_routes.routes + for route in all_routes.routes + if ( + route.prefix not in ("0.0.0.0/0", "::/0") + and route.prefix == route.prefix + and route.id != route_check.id + ) + ) - if failover_available: + if len(failover_route_prefix) > 0: # Set up the display code: - enabled = "fiber_manual_record" + enabled = ( + "" + "fiber_manual_record" + ) disabled = ( "fiber_manual_record" ) @@ -1256,105 +1017,82 @@ def render_routes(): # Build the display for failover routes: for route_prefix in failover_route_prefix: # Get all route ID's associated with the route_prefix: - route_id_list = [] - for route in all_routes["routes"]: - if route["prefix"] == route_prefix: - route_id_list.append(route["id"]) + route_id_list = [ + route.id for route in all_routes.routes if route.prefix == route_prefix + ] # Set up the display code: failover_enabled = 
( - "fiber_manual_record" + f"fiber_manual_record" ) failover_disabled = ( - "fiber_manual_record" + f"fiber_manual_record" ) failover_display = failover_disabled for route_id in route_id_list: # Get the routes index: current_route_index = all_routes_id_list.index(route_id) - if all_routes["routes"][current_route_index]["enabled"]: + if all_routes.routes[current_route_index].enabled: failover_display = failover_enabled # Get all route_id's associated with the route prefix: - failover_content += ( - """

    -

    """ - + failover_display - + """
    """ - + str(route_prefix) - + """
    - - - - - - - - - - """ - ) + failover_content += f"""

    +

    {failover_display}
    {route_prefix}
    +
    MachineEnabledPrimary
    + + + + + + + + + """ # Build the display: for route_id in route_id_list: idx = all_routes_id_list.index(route_id) - machine = all_routes["routes"][idx]["machine"]["givenName"] - machine_id = all_routes["routes"][idx]["machine"]["id"] - is_primary = all_routes["routes"][idx]["isPrimary"] - is_enabled = all_routes["routes"][idx]["enabled"] + machine = all_routes.routes[idx].machine.given_name + machine_id = all_routes.routes[idx].machine.id + is_primary = all_routes.routes[idx].is_primary + is_enabled = all_routes.routes[idx].enabled - payload = [] - for item in route_id_list: - payload.append(int(item)) + payload = route_id_list.copy() - app.logger.debug( - "[%s] Machine: [%s] %s : %s / %s", - str(route_id), - str(machine_id), - str(machine), - str(is_enabled), - str(is_primary), + current_app.logger.debug( + "[%i] Machine: [%i] %s : %r / %r", + route_id, + machine_id, + machine, + is_enabled, + is_primary, ) - app.logger.debug(str(all_routes["routes"][idx])) + current_app.logger.debug(str(all_routes.routes[idx])) # Set up the display code: enabled_display_enabled = ( - "fiber_manual_record" + f"fiber_manual_record" ) enabled_display_disabled = ( - "fiber_manual_record" + f"fiber_manual_record" ) primary_display_enabled = ( - "fiber_manual_record" + f"fiber_manual_record" ) primary_display_disabled = ( - "fiber_manual_record" + f"fiber_manual_record" ) # Set displays: @@ -1366,100 +1104,83 @@ def render_routes(): ) # Build a simple table for all non-exit routes: - failover_content += ( - """ + failover_content += f""" - - - + + + """ - ) failover_content += "
    MachineEnabledPrimary
    """ - + str(machine) - + """
    """ - + str(enabled_display) - + """
    """ - + str(primary_display) - + """
    {machine}
    {enabled_display}
    {primary_display}

    " failover_content += markup_post ############################################################################################## # Step 3: Get exit nodes only: - exit_node_list = [] - # Get a list of nodes with exit routes: - for route in all_routes["routes"]: - # For every exit route found, store the machine name in an array: - if route["prefix"] == "0.0.0.0/0" or route["prefix"] == "::/0": - if route["machine"]["givenName"] not in exit_node_list: - exit_node_list.append(route["machine"]["givenName"]) + # Get a set of nodes with exit routes: + exit_node_list = set( + route.machine.given_name + for route in all_routes.routes + if route.prefix in ("0.0.0.0/0", "::/0") + ) # Exit node display building: # Display by machine, not by route - exit_content = markup_pre + exit_title - exit_content += """

    - - - - - - - - """ + exit_content = ( + markup_pre + + exit_title + + """ +

    MachineEnabled
    + + + + + + + + """ + ) # Get exit route ID's for each node in the list: for node in exit_node_list: - node_exit_route_ids = [] + node_exit_route_ids: list[int] = [] exit_enabled = False exit_available = False machine_id = 0 - for route in all_routes["routes"]: - if route["prefix"] == "0.0.0.0/0" or route["prefix"] == "::/0": - if route["machine"]["givenName"] == node: - node_exit_route_ids.append(route["id"]) - machine_id = route["machine"]["id"] - exit_available = True - if route["enabled"]: - exit_enabled = True + for route in all_routes.routes: + if ( + route.prefix in ("0.0.0.0/0", "::/0") + and route.machine.given_name == node + ): + node_exit_route_ids.append(route.id) + machine_id = route.machine.id + exit_available = True + if route.enabled: + exit_enabled = True if exit_available: # Set up the display code: enabled = ( - "fiber_manual_record" + f"fiber_manual_record" ) disabled = ( - "fiber_manual_record" + f"fiber_manual_record" ) # Set the displays: enabled_display = enabled if exit_enabled else disabled - exit_content += ( + exit_content += f""" + + + + """ - - - - - """ - ) exit_content += "
    MachineEnabled
    {node}
    {enabled_display}
    """ - + str(node) - + """
    """ - + str(enabled_display) - + """

    " + markup_post content = route_content + failover_content + exit_content diff --git a/server.py b/server.py index 82f72c4..3aab318 100644 --- a/server.py +++ b/server.py @@ -1,614 +1,399 @@ -import json -import logging -import os -import secrets -from datetime import datetime -from functools import wraps +"""Headscale WebUI Flask server.""" -import pytz -import requests -from dateutil import parser -from flask import Flask, Markup, escape, redirect, render_template, request, url_for -from flask_executor import Executor +import asyncio +import atexit +import datetime +import functools +from multiprocessing import Lock +from typing import Awaitable, Callable, Type, TypeVar + +import headscale_api.schema.headscale.v1 as schema +from aiohttp import ClientConnectionError +from apscheduler.schedulers.background import BackgroundScheduler # type: ignore +from betterproto import Message +from flask import Flask, redirect, render_template, url_for +from flask_pydantic.core import validate +from headscale_api.headscale import UnauthorizedError +from markupsafe import Markup +from pydantic import BaseModel, Field from werkzeug.middleware.proxy_fix import ProxyFix -import headscale -import helper import renderer +from auth import AuthManager +from config import Config, InitCheckError +from headscale import HeadscaleApi -# Global vars -# Colors: https://materializecss.com/color.html -COLOR = os.environ["COLOR"].replace('"', "").lower() -COLOR_NAV = COLOR + " darken-1" -COLOR_BTN = COLOR + " darken-3" -AUTH_TYPE = os.environ["AUTH_TYPE"].replace('"', "").lower() -LOG_LEVEL = os.environ["LOG_LEVEL"].replace('"', "").upper() -# If LOG_LEVEL is DEBUG, enable Flask debugging: -DEBUG_STATE = True if LOG_LEVEL == "DEBUG" else False -# Initiate the Flask application and logging: -app = Flask(__name__, static_url_path="/static") -match LOG_LEVEL: - case "DEBUG": - app.logger.setLevel(logging.DEBUG) - case "INFO": - app.logger.setLevel(logging.INFO) - case "WARNING": - 
app.logger.setLevel(logging.WARNING) - case "ERROR": - app.logger.setLevel(logging.ERROR) - case "CRITICAL": - app.logger.setLevel(logging.CRITICAL) - -executor = Executor(app) -app.wsgi_app = ProxyFix(app.wsgi_app, x_for=1, x_proto=1, x_host=1, x_prefix=1) -app.logger.info( - "Headscale-WebUI Version: " - + os.environ["APP_VERSION"] - + " / " - + os.environ["GIT_BRANCH"] -) -app.logger.info("LOG LEVEL SET TO %s", str(LOG_LEVEL)) -app.logger.info("DEBUG STATE: %s", str(DEBUG_STATE)) - -######################################################################################## -# Set Authentication type. Currently "OIDC" and "BASIC" -######################################################################################## -if AUTH_TYPE == "oidc": - # Currently using: flask-providers-oidc - https://pypi.org/project/flask-providers-oidc/ - # - # https://gist.github.com/thomasdarimont/145dc9aa857b831ff2eff221b79d179a/ - # https://www.authelia.com/integration/openid-connect/introduction/ - # https://github.com/steinarvk/flask_oidc_demo - app.logger.info("Loading OIDC libraries and configuring app...") - - DOMAIN_NAME = os.environ["DOMAIN_NAME"] - BASE_PATH = os.environ["SCRIPT_NAME"] if os.environ["SCRIPT_NAME"] != "/" else "" - OIDC_SECRET = os.environ["OIDC_CLIENT_SECRET"] - OIDC_CLIENT_ID = os.environ["OIDC_CLIENT_ID"] - OIDC_AUTH_URL = os.environ["OIDC_AUTH_URL"] - - # Construct client_secrets.json: - response = requests.get(str(OIDC_AUTH_URL)) - oidc_info = response.json() - app.logger.debug("JSON Dumps for OIDC_INFO: " + json.dumps(oidc_info)) - - client_secrets = json.dumps( - { - "web": { - "issuer": oidc_info["issuer"], - "auth_uri": oidc_info["authorization_endpoint"], - "client_id": OIDC_CLIENT_ID, - "client_secret": OIDC_SECRET, - "redirect_uris": [DOMAIN_NAME + BASE_PATH + "/oidc_callback"], - "userinfo_uri": oidc_info["userinfo_endpoint"], - "token_uri": oidc_info["token_endpoint"], - } - } +def create_tainted_app(app: Flask, error: InitCheckError) -> Flask: + 
"""Run tainted version of the Headscale WebUI after encountering an error.""" + app.logger.error( + "Encountered error when trying to run initialization checks. Running in " + "tainted mode (only the error page is available). Correct all errors and " + "restart the server." ) - with open("/app/instance/secrets.json", "w+") as secrets_json: - secrets_json.write(client_secrets) - app.logger.debug("Client Secrets: ") - with open("/app/instance/secrets.json", "r+") as secrets_json: - app.logger.debug("/app/instances/secrets.json:") - app.logger.debug(secrets_json.read()) + @app.route("/") + def catchall_redirect(path: str): # pylint: disable=unused-argument + return redirect(url_for("error_page")) - app.config.update( - { - "SECRET_KEY": secrets.token_urlsafe(32), - "TESTING": DEBUG_STATE, - "DEBUG": DEBUG_STATE, - "OIDC_CLIENT_SECRETS": "/app/instance/secrets.json", - "OIDC_ID_TOKEN_COOKIE_SECURE": True, - "OIDC_REQUIRE_VERIFIED_EMAIL": False, - "OIDC_USER_INFO_ENABLED": True, - "OIDC_OPENID_REALM": "Headscale-WebUI", - "OIDC_SCOPES": ["openid", "profile", "email"], - "OIDC_INTROSPECTION_AUTH_METHOD": "client_secret_post", - } + @app.route("/error") + async def error_page(): + return render_template( + "error.html", + error_message=Markup( + "".join(sub_error.format_message() for sub_error in error) + ), + ) + + return app + + +async def create_app() -> Flask: + """Run Headscale WebUI Flask application. + + For arguments refer to `Flask.run()` function. + """ + app = Flask(__name__, static_url_path="/static") + app.wsgi_app = ProxyFix( # type: ignore[method-assign] + app.wsgi_app, x_for=1, x_proto=1, x_host=1, x_prefix=1 # type: ignore ) - from flask_oidc import OpenIDConnect + try: + # Try to initialize configuration from environment. + config = Config() # type: ignore - oidc = OpenIDConnect(app) + with app.app_context(): + # Try to create authentication handler (including loading auth config). 
+ auth = AuthManager(config) -elif AUTH_TYPE == "basic": - # https://flask-basicauth.readthedocs.io/en/latest/ - app.logger.info("Loading basic auth libraries and configuring app...") - from flask_basicauth import BasicAuth + # Try to create Headscale API interface. + headscale = HeadscaleApi(config) - app.config["BASIC_AUTH_USERNAME"] = os.environ["BASIC_AUTH_USER"].replace('"', "") - app.config["BASIC_AUTH_PASSWORD"] = os.environ["BASIC_AUTH_PASS"] - app.config["BASIC_AUTH_FORCE"] = True + # Check health of Headscale API. + if not await headscale.health_check(): + raise ClientConnectionError(f"Health check failed on {headscale.base_url}") + except Exception as error: # pylint: disable=broad-exception-caught + # We want to catch broad exception to ensure no errors whatsoever went through + # the environment init. + with app.app_context(): + check_error = InitCheckError.from_exception(error) + return create_tainted_app(app, check_error) - basic_auth = BasicAuth(app) + app.logger.setLevel(config.log_level) + app.logger.info( + "Headscale-WebUI Version: %s / %s", config.app_version, config.git_branch + ) + app.logger.info("Logger level set to %s.", config.log_level) + app.logger.info("Debug state: %s", config.debug_mode) - ######################################################################################## - # Set Authentication type - Dynamically load function decorators - # https://stackoverflow.com/questions/17256602/assertionerror-view-function-mapping-is-overwriting-an-existing-endpoint-functi - ######################################################################################## - # Make a fake decorator for oidc.require_login - # If anyone knows a better way of doing this, please let me know. 
- class OpenIDConnect: - def require_login(self, view_func): - @wraps(view_func) - def decorated(*args, **kwargs): - return view_func(*args, **kwargs) + register_pages(app, headscale, auth) + register_api_endpoints(app, headscale, auth) + register_scheduler(app, headscale) - return decorated - - oidc = OpenIDConnect() - -else: - ######################################################################################## - # Set Authentication type - Dynamically load function decorators - # https://stackoverflow.com/questions/17256602/assertionerror-view-function-mapping-is-overwriting-an-existing-endpoint-functi - ######################################################################################## - # Make a fake decorator for oidc.require_login - # If anyone knows a better way of doing this, please let me know. - class OpenIDConnect: - def require_login(self, view_func): - @wraps(view_func) - def decorated(*args, **kwargs): - return view_func(*args, **kwargs) - - return decorated - - oidc = OpenIDConnect() + return app -######################################################################################## -# / pages - User-facing pages -######################################################################################## -@app.route("/") -@app.route("/overview") -@oidc.require_login -def overview_page(): - # Some basic sanity checks: - pass_checks = str(helper.load_checks()) - if pass_checks != "Pass": - return redirect(url_for(pass_checks)) +def register_pages(app: Flask, headscale: HeadscaleApi, auth: AuthManager): + """Register user-facing pages.""" + config = headscale.app_config - # Check if OIDC is enabled. 
If it is, display the buttons: - OIDC_NAV_DROPDOWN = Markup("") - OIDC_NAV_MOBILE = Markup("") - if AUTH_TYPE == "oidc": - email_address = oidc.user_getfield("email") - user_name = oidc.user_getfield("preferred_username") - name = oidc.user_getfield("name") - OIDC_NAV_DROPDOWN = renderer.oidc_nav_dropdown(user_name, email_address, name) - OIDC_NAV_MOBILE = renderer.oidc_nav_mobile(user_name, email_address, name) - - return render_template( - "overview.html", - render_page=renderer.render_overview(), - COLOR_NAV=COLOR_NAV, - COLOR_BTN=COLOR_BTN, - OIDC_NAV_DROPDOWN=OIDC_NAV_DROPDOWN, - OIDC_NAV_MOBILE=OIDC_NAV_MOBILE, + # Convenience short for render_defaults + render_defaults = functools.partial( + renderer.render_defaults, config, auth.oidc_handler ) + @app.route("/") + @app.route("/overview") + @auth.require_login + @headscale.key_check_guard + async def overview_page(): + return render_template( + "overview.html", + render_page=await renderer.render_overview(headscale), + **render_defaults(), + ) -@app.route("/routes", methods=("GET", "POST")) -@oidc.require_login -def routes_page(): - # Some basic sanity checks: - pass_checks = str(helper.load_checks()) - if pass_checks != "Pass": - return redirect(url_for(pass_checks)) + @app.route("/routes", methods=("GET", "POST")) + @auth.require_login + @headscale.key_check_guard + async def routes_page(): + return render_template( + "routes.html", + render_page=await renderer.render_routes(headscale), + **render_defaults(), + ) - # Check if OIDC is enabled. 
If it is, display the buttons: - OIDC_NAV_DROPDOWN = Markup("") - OIDC_NAV_MOBILE = Markup("") - INPAGE_SEARCH = Markup(renderer.render_search()) - if AUTH_TYPE == "oidc": - email_address = oidc.user_getfield("email") - user_name = oidc.user_getfield("preferred_username") - name = oidc.user_getfield("name") - OIDC_NAV_DROPDOWN = renderer.oidc_nav_dropdown(user_name, email_address, name) - OIDC_NAV_MOBILE = renderer.oidc_nav_mobile(user_name, email_address, name) + @app.route("/machines", methods=("GET", "POST")) + @auth.require_login + @headscale.key_check_guard + async def machines_page(): + return render_template( + "machines.html", + cards=await renderer.render_machines_cards(headscale), + headscale_server=config.hs_server, + inpage_search=renderer.render_search(), + **render_defaults(), + ) - return render_template( - "routes.html", - render_page=renderer.render_routes(), - COLOR_NAV=COLOR_NAV, - COLOR_BTN=COLOR_BTN, - OIDC_NAV_DROPDOWN=OIDC_NAV_DROPDOWN, - OIDC_NAV_MOBILE=OIDC_NAV_MOBILE, - ) + @app.route("/users", methods=("GET", "POST")) + @auth.require_login + @headscale.key_check_guard + async def users_page(): + return render_template( + "users.html", + cards=await renderer.render_users_cards(headscale), + inpage_search=renderer.render_search(), + ) + @app.route("/settings", methods=("GET", "POST")) + @auth.require_login + async def settings_page(): + return render_template( + "settings.html", + url=headscale.base_url, + BUILD_DATE=config.build_date, + APP_VERSION=config.app_version, + GIT_REPO_URL=config.git_repo_url, + GIT_COMMIT=config.git_commit, + GIT_BRANCH=config.git_branch, + HS_VERSION=config.hs_version, + **render_defaults(), + ) -@app.route("/machines", methods=("GET", "POST")) -@oidc.require_login -def machines_page(): - # Some basic sanity checks: - pass_checks = str(helper.load_checks()) - if pass_checks != "Pass": - return redirect(url_for(pass_checks)) + @app.route("/error") + async def error_page(): + """Error page redirect. 
- # Check if OIDC is enabled. If it is, display the buttons: - OIDC_NAV_DROPDOWN = Markup("") - OIDC_NAV_MOBILE = Markup("") - INPAGE_SEARCH = Markup(renderer.render_search()) - if AUTH_TYPE == "oidc": - email_address = oidc.user_getfield("email") - user_name = oidc.user_getfield("preferred_username") - name = oidc.user_getfield("name") - OIDC_NAV_DROPDOWN = renderer.oidc_nav_dropdown(user_name, email_address, name) - OIDC_NAV_MOBILE = renderer.oidc_nav_mobile(user_name, email_address, name) - - cards = renderer.render_machines_cards() - return render_template( - "machines.html", - cards=cards, - headscale_server=headscale.get_url(True), - COLOR_NAV=COLOR_NAV, - COLOR_BTN=COLOR_BTN, - OIDC_NAV_DROPDOWN=OIDC_NAV_DROPDOWN, - OIDC_NAV_MOBILE=OIDC_NAV_MOBILE, - INPAGE_SEARCH=INPAGE_SEARCH, - ) - - -@app.route("/users", methods=("GET", "POST")) -@oidc.require_login -def users_page(): - # Some basic sanity checks: - pass_checks = str(helper.load_checks()) - if pass_checks != "Pass": - return redirect(url_for(pass_checks)) - - # Check if OIDC is enabled. 
If it is, display the buttons: - OIDC_NAV_DROPDOWN = Markup("") - OIDC_NAV_MOBILE = Markup("") - INPAGE_SEARCH = Markup(renderer.render_search()) - if AUTH_TYPE == "oidc": - email_address = oidc.user_getfield("email") - user_name = oidc.user_getfield("preferred_username") - name = oidc.user_getfield("name") - OIDC_NAV_DROPDOWN = renderer.oidc_nav_dropdown(user_name, email_address, name) - OIDC_NAV_MOBILE = renderer.oidc_nav_mobile(user_name, email_address, name) - - cards = renderer.render_users_cards() - return render_template( - "users.html", - cards=cards, - COLOR_NAV=COLOR_NAV, - COLOR_BTN=COLOR_BTN, - OIDC_NAV_DROPDOWN=OIDC_NAV_DROPDOWN, - OIDC_NAV_MOBILE=OIDC_NAV_MOBILE, - INPAGE_SEARCH=INPAGE_SEARCH, - ) - - -@app.route("/settings", methods=("GET", "POST")) -@oidc.require_login -def settings_page(): - # Some basic sanity checks: - pass_checks = str(helper.load_checks()) - if pass_checks != "Pass" and pass_checks != "settings_page": - return redirect(url_for(pass_checks)) - - # Check if OIDC is enabled. 
If it is, display the buttons: - OIDC_NAV_DROPDOWN = Markup("") - OIDC_NAV_MOBILE = Markup("") - if AUTH_TYPE == "oidc": - email_address = oidc.user_getfield("email") - user_name = oidc.user_getfield("preferred_username") - name = oidc.user_getfield("name") - OIDC_NAV_DROPDOWN = renderer.oidc_nav_dropdown(user_name, email_address, name) - OIDC_NAV_MOBILE = renderer.oidc_nav_mobile(user_name, email_address, name) - - GIT_COMMIT_LINK = Markup( - "" - + str(os.environ["GIT_COMMIT"])[0:7] - + "" - ) - - return render_template( - "settings.html", - url=headscale.get_url(), - COLOR_NAV=COLOR_NAV, - COLOR_BTN=COLOR_BTN, - OIDC_NAV_DROPDOWN=OIDC_NAV_DROPDOWN, - OIDC_NAV_MOBILE=OIDC_NAV_MOBILE, - BUILD_DATE=os.environ["BUILD_DATE"], - APP_VERSION=os.environ["APP_VERSION"], - GIT_COMMIT=GIT_COMMIT_LINK, - GIT_BRANCH=os.environ["GIT_BRANCH"], - HS_VERSION=os.environ["HS_VERSION"], - ) - - -@app.route("/error") -@oidc.require_login -def error_page(): - if helper.access_checks() == "Pass": + Once we get out of tainted mode, we want to still have this route active so that + users refreshing the page get redirected to the overview page. 
+ """ return redirect(url_for("overview_page")) - return render_template("error.html", ERROR_MESSAGE=Markup(helper.access_checks())) + @app.route("/logout") + @auth.require_login + @headscale.key_check_guard + async def logout_page(): + logout_url = auth.logout() + if logout_url is not None: + return redirect(logout_url) + return redirect(url_for("overview_page")) -@app.route("/logout") -def logout_page(): - if AUTH_TYPE == "oidc": - oidc.logout() - return redirect(url_for("overview_page")) +def register_api_endpoints(app: Flask, headscale: HeadscaleApi, auth: AuthManager): + """Register Headscale WebUI API endpoints.""" + RequestT = TypeVar("RequestT", bound=Message) + ResponseT = TypeVar("ResponseT", bound=Message) + def api_passthrough( + route: str, + request_type: Type[RequestT], + api_method: Callable[[RequestT], Awaitable[ResponseT | str]], + ): + """Passthrough the Headscale API in a concise form. -######################################################################################## -# /api pages -######################################################################################## + Arguments: + route -- Flask route to the API endpoint. + request_type -- request model (from headscale_api.schema). + api_method -- backend method to pass through the Flask request. 
+ """ -######################################################################################## -# Headscale API Key Endpoints -######################################################################################## + async def api_passthrough_page(body: RequestT) -> ResponseT | str: + return await api_method(body) # type: ignore + api_passthrough_page.__name__ = route.replace("/", "_") + api_passthrough_page.__annotations__ = {"body": request_type} -@app.route("/api/test_key", methods=("GET", "POST")) -@oidc.require_login -def test_key_page(): - api_key = headscale.get_api_key() - url = headscale.get_url() + return app.route(route, methods=["POST"])( + auth.require_login( + headscale.key_check_guard( + validate()(api_passthrough_page) # type: ignore + ) + ) + ) - # Test the API key. If the test fails, return a failure. - status = headscale.test_api_key(url, api_key) - if status != 200: - return "Unauthenticated" + class TestKeyRequest(BaseModel): + """/api/test_key request.""" - renewed = headscale.renew_api_key(url, api_key) - app.logger.warning("The below statement will be TRUE if the key has been renewed, ") - app.logger.warning("or DOES NOT need renewal. False in all other cases") - app.logger.warning("Renewed: " + str(renewed)) - # The key works, let's renew it if it needs it. If it does, re-read the api_key from the file: - if renewed: - api_key = headscale.get_api_key() + api_key: str | None = Field( + None, description="API key to test. If None test the current key." 
+ ) - key_info = headscale.get_api_key_info(url, api_key) + @app.route("/api/test_key", methods=("GET", "POST")) + @auth.require_login + @validate() + async def test_key_page(body: TestKeyRequest): + if body.api_key == "": + body.api_key = None - # Set the current timezone and local time - timezone = pytz.timezone(os.environ["TZ"] if os.environ["TZ"] else "UTC") - local_time = timezone.localize(datetime.now()) + async with headscale.session: + if not await headscale.test_api_key(body.api_key): + return "Unauthenticated", 401 - # Format the dates for easy readability - creation_parse = parser.parse(key_info["createdAt"]) - creation_local = creation_parse.astimezone(timezone) - creation_delta = local_time - creation_local - creation_print = helper.pretty_print_duration(creation_delta) - creation_time = ( - str(creation_local.strftime("%A %m/%d/%Y, %H:%M:%S")) - + " " - + str(timezone) - + " (" - + str(creation_print) - + ")" + ret = await headscale.renew_api_key() + match ret: + case None: + return "Unauthenticated", 401 + case schema.ApiKey(): + return ret + case _: + new_key_info = await headscale.get_api_key_info() + if new_key_info is None: + return "Unauthenticated", 401 + return new_key_info + + class SaveKeyRequest(BaseModel): + """/api/save_key request.""" + + api_key: str + + @app.route("/api/save_key", methods=["POST"]) + @auth.require_login + @validate() + async def save_key_page(body: SaveKeyRequest): + async with headscale.session: + # Test the new API key. + if not await headscale.test_api_key(body.api_key): + return "Key failed testing. Check your key.", 401 + + try: + headscale.api_key = body.api_key + except OSError: + return "Key did not save properly. Check logs.", 500 + + key_info = await headscale.get_api_key_info() + + if key_info is None: + return "Key saved but error occurred on key info retrieval." 
+ + return ( + f'Key saved and tested: Key: "{key_info.prefix}", ' + f"expiration: {key_info.expiration}" + ) + + #################################################################################### + # Machine API Endpoints + #################################################################################### + + class UpdateRoutePageRequest(BaseModel): + """/api/update_route request.""" + + route_id: int + current_state: bool + + @app.route("/api/update_route", methods=["POST"]) + @auth.require_login + @validate() + async def update_route_page(body: UpdateRoutePageRequest): + if body.current_state: + return await headscale.disable_route( + schema.DisableRouteRequest(body.route_id) + ) + return await headscale.enable_route(schema.EnableRouteRequest(body.route_id)) + + api_passthrough( + "/api/machine_information", + schema.GetMachineRequest, + headscale.get_machine, + ) + api_passthrough( + "/api/delete_machine", + schema.DeleteMachineRequest, + headscale.delete_machine, + ) + api_passthrough( + "/api/rename_machine", + schema.RenameMachineRequest, + headscale.rename_machine, + ) + api_passthrough( + "/api/move_user", + schema.MoveMachineRequest, + headscale.move_machine, + ) + api_passthrough("/api/set_machine_tags", schema.SetTagsRequest, headscale.set_tags) + api_passthrough( + "/api/register_machine", + schema.RegisterMachineRequest, + headscale.register_machine, ) - expiration_parse = parser.parse(key_info["expiration"]) - expiration_local = expiration_parse.astimezone(timezone) - expiration_delta = expiration_local - local_time - expiration_print = helper.pretty_print_duration(expiration_delta, "expiry") - expiration_time = ( - str(expiration_local.strftime("%A %m/%d/%Y, %H:%M:%S")) - + " " - + str(timezone) - + " (" - + str(expiration_print) - + ")" + #################################################################################### + # User API Endpoints + #################################################################################### + + 
api_passthrough("/api/rename_user", schema.RenameUserRequest, headscale.rename_user) + api_passthrough("/api/add_user", schema.CreateUserRequest, headscale.create_user) + api_passthrough("/api/delete_user", schema.DeleteUserRequest, headscale.delete_user) + api_passthrough("/api/get_users", schema.ListUsersRequest, headscale.list_users) + + #################################################################################### + # Pre-Auth Key API Endpoints + #################################################################################### + + api_passthrough( + "/api/add_preauth_key", + schema.CreatePreAuthKeyRequest, + headscale.create_pre_auth_key, + ) + api_passthrough( + "/api/expire_preauth_key", + schema.ExpirePreAuthKeyRequest, + headscale.expire_pre_auth_key, + ) + api_passthrough( + "/api/build_preauthkey_table", + schema.ListPreAuthKeysRequest, + functools.partial(renderer.build_preauth_key_table, headscale), ) - key_info["expiration"] = expiration_time - key_info["createdAt"] = creation_time + #################################################################################### + # Route API Endpoints + #################################################################################### - message = json.dumps(key_info) - return message + api_passthrough("/api/get_routes", schema.GetRoutesRequest, headscale.get_routes) -@app.route("/api/save_key", methods=["POST"]) -@oidc.require_login -def save_key_page(): - json_response = request.get_json() - api_key = json_response["api_key"] - url = headscale.get_url() - file_written = headscale.set_api_key(api_key) - message = "" - - if file_written: - # Re-read the file and get the new API key and test it - api_key = headscale.get_api_key() - test_status = headscale.test_api_key(url, api_key) - if test_status == 200: - key_info = headscale.get_api_key_info(url, api_key) - expiration = key_info["expiration"] - message = "Key: '" + api_key + "', Expiration: " + expiration - # If the key was saved successfully, test 
it: - return "Key saved and tested: " + message - else: - return "Key failed testing. Check your key" - else: - return "Key did not save properly. Check logs" +scheduler_registered: bool = False +scheduler_lock = Lock() -######################################################################################## -# Machine API Endpoints -######################################################################################## -@app.route("/api/update_route", methods=["POST"]) -@oidc.require_login -def update_route_page(): - json_response = request.get_json() - route_id = escape(json_response["route_id"]) - url = headscale.get_url() - api_key = headscale.get_api_key() - current_state = json_response["current_state"] +def register_scheduler(app: Flask, headscale: HeadscaleApi): + """Register background scheduler.""" + global scheduler_registered # pylint: disable=global-statement - return headscale.update_route(url, api_key, route_id, current_state) + with scheduler_lock: + if scheduler_registered: + # For multi-worker set-up, only a single scheduler needs to be enabled. + return + + scheduler = BackgroundScheduler( + logger=app.logger, timezone=headscale.app_config.timezone + ) + scheduler.start() # type: ignore + + def renew_api_key(): + """Renew API key in a background job.""" + app.logger.info("Key renewal schedule triggered...") + try: + if app.ensure_sync(headscale.renew_api_key)() is None: # type: ignore + app.logger.error("Failed to renew the key. Check configuration.") + except UnauthorizedError: + app.logger.error("Current key is invalid. 
Check configuration.") + + scheduler.add_job( # type: ignore + renew_api_key, + "interval", + hours=1, + id="renew_api_key", + max_instances=1, + next_run_time=datetime.datetime.now(), + ) + + atexit.register(scheduler.shutdown) # type: ignore + + scheduler_registered = True -@app.route("/api/machine_information", methods=["POST"]) -@oidc.require_login -def machine_information_page(): - json_response = request.get_json() - machine_id = escape(json_response["id"]) - url = headscale.get_url() - api_key = headscale.get_api_key() +headscale_webui = asyncio.run(create_app()) - return headscale.get_machine_info(url, api_key, machine_id) - - -@app.route("/api/delete_machine", methods=["POST"]) -@oidc.require_login -def delete_machine_page(): - json_response = request.get_json() - machine_id = escape(json_response["id"]) - url = headscale.get_url() - api_key = headscale.get_api_key() - - return headscale.delete_machine(url, api_key, machine_id) - - -@app.route("/api/rename_machine", methods=["POST"]) -@oidc.require_login -def rename_machine_page(): - json_response = request.get_json() - machine_id = escape(json_response["id"]) - new_name = escape(json_response["new_name"]) - url = headscale.get_url() - api_key = headscale.get_api_key() - - return headscale.rename_machine(url, api_key, machine_id, new_name) - - -@app.route("/api/move_user", methods=["POST"]) -@oidc.require_login -def move_user_page(): - json_response = request.get_json() - machine_id = escape(json_response["id"]) - new_user = escape(json_response["new_user"]) - url = headscale.get_url() - api_key = headscale.get_api_key() - - return headscale.move_user(url, api_key, machine_id, new_user) - - -@app.route("/api/set_machine_tags", methods=["POST"]) -@oidc.require_login -def set_machine_tags(): - json_response = request.get_json() - machine_id = escape(json_response["id"]) - machine_tags = json_response["tags_list"] - url = headscale.get_url() - api_key = headscale.get_api_key() - - return 
headscale.set_machine_tags(url, api_key, machine_id, machine_tags) - - -@app.route("/api/register_machine", methods=["POST"]) -@oidc.require_login -def register_machine(): - json_response = request.get_json() - machine_key = escape(json_response["key"]) - user = escape(json_response["user"]) - url = headscale.get_url() - api_key = headscale.get_api_key() - - return headscale.register_machine(url, api_key, machine_key, user) - - -######################################################################################## -# User API Endpoints -######################################################################################## -@app.route("/api/rename_user", methods=["POST"]) -@oidc.require_login -def rename_user_page(): - json_response = request.get_json() - old_name = escape(json_response["old_name"]) - new_name = escape(json_response["new_name"]) - url = headscale.get_url() - api_key = headscale.get_api_key() - - return headscale.rename_user(url, api_key, old_name, new_name) - - -@app.route("/api/add_user", methods=["POST"]) -@oidc.require_login -def add_user(): - json_response = request.get_json() - user_name = str(escape(json_response["name"])) - url = headscale.get_url() - api_key = headscale.get_api_key() - json_string = '{"name": "' + user_name + '"}' - - return headscale.add_user(url, api_key, json_string) - - -@app.route("/api/delete_user", methods=["POST"]) -@oidc.require_login -def delete_user(): - json_response = request.get_json() - user_name = str(escape(json_response["name"])) - url = headscale.get_url() - api_key = headscale.get_api_key() - - return headscale.delete_user(url, api_key, user_name) - - -@app.route("/api/get_users", methods=["POST"]) -@oidc.require_login -def get_users_page(): - url = headscale.get_url() - api_key = headscale.get_api_key() - - return headscale.get_users(url, api_key) - - -######################################################################################## -# Pre-Auth Key API Endpoints 
-######################################################################################## -@app.route("/api/add_preauth_key", methods=["POST"]) -@oidc.require_login -def add_preauth_key(): - json_response = json.dumps(request.get_json()) - url = headscale.get_url() - api_key = headscale.get_api_key() - - return headscale.add_preauth_key(url, api_key, json_response) - - -@app.route("/api/expire_preauth_key", methods=["POST"]) -@oidc.require_login -def expire_preauth_key(): - json_response = json.dumps(request.get_json()) - url = headscale.get_url() - api_key = headscale.get_api_key() - - return headscale.expire_preauth_key(url, api_key, json_response) - - -@app.route("/api/build_preauthkey_table", methods=["POST"]) -@oidc.require_login -def build_preauth_key_table(): - json_response = request.get_json() - user_name = str(escape(json_response["name"])) - - return renderer.build_preauth_key_table(user_name) - - -######################################################################################## -# Route API Endpoints -######################################################################################## -@app.route("/api/get_routes", methods=["POST"]) -@oidc.require_login -def get_route_info(): - url = headscale.get_url() - api_key = headscale.get_api_key() - - return headscale.get_routes(url, api_key) - - -######################################################################################## -# Main thread -######################################################################################## if __name__ == "__main__": - app.run(host="0.0.0.0", debug=DEBUG_STATE) + headscale_webui.run(host="0.0.0.0") diff --git a/static/js/custom.js b/static/js/custom.js index 3a34636..f86595c 100644 --- a/static/js/custom.js +++ b/static/js/custom.js @@ -165,55 +165,54 @@ document.addEventListener('DOMContentLoaded', function () { //----------------------------------------------------------- function test_key() { document.getElementById('test_modal_results').innerHTML = 
loading() + var api_key = document.getElementById('api_key').value; var data = $.ajax({ - type: "GET", + type: "POST", url: "api/test_key", + data: JSON.stringify({ "api_key": api_key }), + contentType: "application/json", success: function (response) { - if (response == "Unauthenticated") { - html = ` + document.getElementById('test_modal_results').innerHTML = `
    • - warning - Error -

      Key authentication failed. Check your key.

      + check + Success +

      Key authenticated with the Headscale server.

    +
    Key Information
    + + + + + + + + + + + + + + + + + + + +
    Key ID${response.id}
    Prefix${response.prefix}
    Expiration Date${response.expiration}
    Creation Date${response.createdAt}
    ` - document.getElementById('test_modal_results').innerHTML = html - } else { - json = JSON.parse(response) - var html = ` -
      -
    • - check - Success -

      Key authenticated with the Headscale server.

      -
    • -
    -
    Key Information
    - - - - - - - - - - - - - - - - - - - -
    Key ID${json['id']}
    Prefix${json['prefix']}
    Expiration Date${json['expiration']}
    Creation Date${json['createdAt']}
    - ` - document.getElementById('test_modal_results').innerHTML = html - } + }, + error: function (xhr, textStatus, errorThrown) { + document.getElementById('test_modal_results').innerHTML = ` +
      +
    • + warning + Error +

      Key authentication failed. Check your key.

      +
    • +
    + ` } }) @@ -241,7 +240,11 @@ function save_key() { data: JSON.stringify(data), contentType: "application/json", success: function (response) { - M.toast({ html: 'Key saved. Testing...' }); + M.toast({ html: 'Testing key and saving...' }); + test_key(); + }, + error: function (xhr, textStatus, errorThrown) { + M.toast({ html: xhr.responseText }) test_key(); } }) @@ -328,8 +331,8 @@ function load_modal_add_preauth_key(user_name) {

    • Pre-Auth keys can be used to authenticate to Headscale without manually registering a machine. Use the flag --auth-key to do so.
    • -
    • "Ephemeral" keys can be used to register devices that frequently come on and drop off the newtork (for example, docker containers)
    • -
    • Keys that are "Reusable" can be used multiple times. Keys that are "One Time Use" will expire after their first use.
    • +
    • "Ephemeral" keys can be used to register devices that frequently come on and drop off the network (for example, docker containers)
    • +
    • Keys that are "Reusable" can be used multiple times. Keys that are "One Time Use" will expire after their first use.

    @@ -390,7 +393,7 @@ function load_modal_move_machine(machine_id) { document.getElementById('modal_confirm').className = "green btn-flat white-text" document.getElementById('modal_confirm').innerText = "Move" - var data = { "id": machine_id } + var data = { "machine_id": machine_id } $.ajax({ type: "POST", url: "api/machine_information", @@ -400,6 +403,8 @@ function load_modal_move_machine(machine_id) { $.ajax({ type: "POST", url: "api/get_users", + data: "{}", + contentType: "application/json", success: function (response) { modal = document.getElementById('card_modal'); modal_title = document.getElementById('modal_title'); @@ -458,7 +463,7 @@ function load_modal_delete_machine(machine_id) { document.getElementById('modal_confirm').className = "red btn-flat white-text" document.getElementById('modal_confirm').innerText = "Delete" - var data = { "id": machine_id } + var data = { "machine_id": machine_id } $.ajax({ type: "POST", url: "api/machine_information", @@ -508,7 +513,7 @@ function load_modal_rename_machine(machine_id) { document.getElementById('modal_title').innerHTML = "Loading..." 
document.getElementById('modal_confirm').className = "green btn-flat white-text" document.getElementById('modal_confirm').innerText = "Rename" - var data = { "id": machine_id } + var data = { "machine_id": machine_id } $.ajax({ type: "POST", url: "api/machine_information", @@ -562,6 +567,8 @@ function load_modal_add_machine() { $.ajax({ type: "POST", url: "api/get_users", + data: "{}", + contentType: "application/json", success: function (response) { modal_body = document.getElementById('default_add_new_machine_modal'); modal_confirm = document.getElementById('new_machine_modal_confirm'); @@ -613,8 +620,7 @@ function delete_chip(machine_id, chipsData) { for (let tag in chipsData) { formattedData[tag] = '"tag:' + chipsData[tag].tag + '"' } - var tags_list = '{"tags": [' + formattedData + ']}' - var data = { "id": machine_id, "tags_list": tags_list } + var data = { "machine_id": machine_id, "tags": formattedData } $.ajax({ type: "POST", @@ -636,8 +642,7 @@ function add_chip(machine_id, chipsData) { for (let tag in chipsData) { formattedData[tag] = '"tag:' + chipsData[tag].tag + '"' } - var tags_list = '{"tags": [' + formattedData + ']}' - var data = { "id": machine_id, "tags_list": tags_list } + var data = { "machine_id": machine_id, "tags": formattedData } $.ajax({ type: "POST", @@ -670,18 +675,17 @@ function add_machine() { data: JSON.stringify(data), contentType: "application/json", success: function (response) { - if (response.machine) { - window.location.reload() - } - load_modal_generic("error", "Error adding machine", response.message) - return + window.location.reload() + }, + error: function (xhr, textStatus, errorThrown) { + load_modal_generic("error", "Error adding machine", JSON.parse(xhr.responseText).message) } }) } function rename_machine(machine_id) { var new_name = document.getElementById('new_name_form').value; - var data = { "id": machine_id, "new_name": new_name }; + var data = { "machine_id": machine_id, "new_name": new_name }; // String to test 
against var regexIT = /[`!@#$%^&*()_+\=\[\]{};':"\\|,.<>\/?~]/; @@ -699,24 +703,22 @@ function rename_machine(machine_id) { data: JSON.stringify(data), contentType: "application/json", success: function (response) { + // Get the modal element and close it + modal_element = document.getElementById('card_modal') + M.Modal.getInstance(modal_element).close() - if (response.status == "True") { - // Get the modal element and close it - modal_element = document.getElementById('card_modal') - M.Modal.getInstance(modal_element).close() - - document.getElementById(machine_id + '-name-container').innerHTML = machine_id + ". " + escapeHTML(new_name) - M.toast({ html: 'Machine ' + machine_id + ' renamed to ' + escapeHTML(new_name) }); - } else { - load_modal_generic("error", "Error setting the machine name", "Headscale response: " + JSON.stringify(response.body.message)) - } + document.getElementById(machine_id + '-name-container').innerHTML = machine_id + ". " + escapeHTML(new_name) + M.toast({ html: 'Machine ' + machine_id + ' renamed to ' + escapeHTML(new_name) }); + }, + error: function (xhr, textStatus, errorThrown) { + load_modal_generic("error", "Error setting the machine name", "Headscale response: " + JSON.parse(xhr.responseText).message) } }) } function move_machine(machine_id) { new_user = document.getElementById('move-select').value - var data = { "id": machine_id, "new_user": new_user }; + var data = { "machine_id": machine_id, "user": new_user }; $.ajax({ type: "POST", @@ -741,7 +743,7 @@ function move_machine(machine_id) { } function delete_machine(machine_id) { - var data = { "id": machine_id }; + var data = { "machine_id": machine_id }; $.ajax({ type: "POST", url: "api/delete_machine", @@ -756,6 +758,9 @@ function delete_machine(machine_id) { document.getElementById(machine_id + '-main-collapsible').className = "collapsible popout hide"; M.toast({ html: 'Machine deleted.' 
}); + }, + error: function (xhr, textStatus, errorThrown) { + load_modal_generic("error", "Error deleting machine", "Headscale response: " + JSON.parse(xhr.responseText).message) } }) } @@ -864,6 +869,7 @@ function get_routes() { async: false, type: "POST", url: "api/get_routes", + data: "{}", contentType: "application/json", success: function (response) { console.log("Got all routes.") @@ -888,8 +894,8 @@ function toggle_failover_route_routespage(routeid, current_state, prefix, route_ var disabledTooltip = "Click to enable" var enabledTooltip = "Click to disable" - var disableState = "False" - var enableState = "True" + var disableState = false + var enableState = true var action_taken = "unchanged." $.ajax({ @@ -1028,25 +1034,24 @@ function rename_user(user_id, old_name) { data: JSON.stringify(data), contentType: "application/json", success: function (response) { - if (response.status == "True") { - // Get the modal element and close it - modal_element = document.getElementById('card_modal') - M.Modal.getInstance(modal_element).close() + // Get the modal element and close it + modal_element = document.getElementById('card_modal') + M.Modal.getInstance(modal_element).close() - // Rename the user on the page: - document.getElementById(user_id + '-name-span').innerHTML = escapeHTML(new_name) + // Rename the user on the page: + document.getElementById(user_id + '-name-span').innerHTML = escapeHTML(new_name) - // Set the button to use the NEW name as the OLD name for both buttons - var rename_button_sm = document.getElementById(user_id + '-rename-user-sm') - rename_button_sm.setAttribute('onclick', 'load_modal_rename_user(' + user_id + ', "' + new_name + '")') - var rename_button_lg = document.getElementById(user_id + '-rename-user-lg') - rename_button_lg.setAttribute('onclick', 'load_modal_rename_user(' + user_id + ', "' + new_name + '")') + // Set the button to use the NEW name as the OLD name for both buttons + var rename_button_sm = document.getElementById(user_id 
+ '-rename-user-sm') + rename_button_sm.setAttribute('onclick', 'load_modal_rename_user(' + user_id + ', "' + new_name + '")') + var rename_button_lg = document.getElementById(user_id + '-rename-user-lg') + rename_button_lg.setAttribute('onclick', 'load_modal_rename_user(' + user_id + ', "' + new_name + '")') - // Send the completion toast - M.toast({ html: "User '" + old_name + "' renamed to '" + new_name + "'." }) - } else { - load_modal_generic("error", "Error setting user name", "Headscale response: " + JSON.stringify(response.body.message)) - } + // Send the completion toast + M.toast({ html: "User '" + old_name + "' renamed to '" + new_name + "'." }) + }, + error: function (xhr, textStatus, errorThrown) { + load_modal_generic("error", "Error setting user name", "Headscale response: " + JSON.parse(xhr.responseText).message) } }) } @@ -1059,19 +1064,17 @@ function delete_user(user_id, user_name) { data: JSON.stringify(data), contentType: "application/json", success: function (response) { - if (response.status == "True") { - // Get the modal element and close it - modal_element = document.getElementById('card_modal') - M.Modal.getInstance(modal_element).close() + // Get the modal element and close it + modal_element = document.getElementById('card_modal') + M.Modal.getInstance(modal_element).close() - // When the machine is deleted, hide its collapsible: - document.getElementById(user_id + '-main-collapsible').className = "collapsible popout hide"; + // When the machine is deleted, hide its collapsible: + document.getElementById(user_id + '-main-collapsible').className = "collapsible popout hide"; - M.toast({ html: 'User deleted.' }); - } else { - // We errored. Decipher the error Headscale sent us and display it: - load_modal_generic("error", "Error deleting user", "Headscale response: " + JSON.stringify(response.body.message)) - } + M.toast({ html: 'User deleted.' 
}); + }, + error: function (xhr, textStatus, errorThrown) { + load_modal_generic("error", "Error deleting user", "Headscale response: " + JSON.parse(xhr.responseText).message) } }) } @@ -1085,18 +1088,16 @@ function add_user() { data: JSON.stringify(data), contentType: "application/json", success: function (response) { - if (response.status == "True") { - // Get the modal element and close it - modal_element = document.getElementById('card_modal') - M.Modal.getInstance(modal_element).close() + // Get the modal element and close it + modal_element = document.getElementById('card_modal') + M.Modal.getInstance(modal_element).close() - // Send the completion toast - M.toast({ html: "User '" + user_name + "' added to Headscale. Refreshing..." }) - window.location.reload() - } else { - // We errored. Decipher the error Headscale sent us and display it: - load_modal_generic("error", "Error adding user", "Headscale response: " + JSON.stringify(response.body.message)) - } + // Send the completion toast + M.toast({ html: "User '" + user_name + "' added to Headscale. Refreshing..." 
}) + window.location.reload() + }, + error: function (xhr, textStatus, errorThrown) { + load_modal_generic("error", "Error adding user", "Headscale response: " + JSON.parse(xhr.responseText).message) } }) } @@ -1109,7 +1110,7 @@ function add_preauth_key(user_name) { // If there is no date, error: if (!date) { load_modal_generic("error", "Invalid Date", "Please enter a valid date"); return } - var data = { "user": user_name, "reusable": reusable, "ephemeral": ephemeral, "expiration": expiration } + var data = { "user": user_name, "reusable": reusable, "ephemeral": ephemeral, "expiration": expiration, "acl_tags": [] } $.ajax({ type: "POST", @@ -1117,33 +1118,31 @@ function add_preauth_key(user_name) { data: JSON.stringify(data), contentType: "application/json", success: function (response) { - if (response.status == "True") { - // Send the completion toast - M.toast({ html: 'PreAuth key created in user ' + user_name }) - // If this is successfull, we should reload the table and close the modal: - var user_data = { "name": user_name } - $.ajax({ - type: "POST", - url: "api/build_preauthkey_table", - data: JSON.stringify(user_data), - contentType: "application/json", - success: function (table_data) { - table = document.getElementById(user_name + '-preauth-keys-collection') - table.innerHTML = table_data - // The tooltips need to be re-initialized afterwards: - M.Tooltip.init(document.querySelectorAll('.tooltipped')) - } - }) - // Get the modal element and close it - modal_element = document.getElementById('card_modal') - M.Modal.getInstance(modal_element).close() + // Send the completion toast + M.toast({ html: 'PreAuth key created in user ' + user_name }) + // If this is successful, we should reload the table and close the modal: + var user_data = { "user": user_name } + $.ajax({ + type: "POST", + url: "api/build_preauthkey_table", + data: JSON.stringify(user_data), + contentType: "application/json", + success: function (table_data) { + table = 
document.getElementById(user_name + '-preauth-keys-collection') + table.innerHTML = table_data + // The tooltips need to be re-initialized afterwards: + M.Tooltip.init(document.querySelectorAll('.tooltipped')) + } + }) + // Get the modal element and close it + modal_element = document.getElementById('card_modal') + M.Modal.getInstance(modal_element).close() - // The tooltips need to be re-initialized afterwards: - M.Tooltip.init(document.querySelectorAll('.tooltipped')) - - } else { - load_modal_generic("error", "Error adding a pre-auth key", "Headscale response: " + JSON.stringify(response.body.message)) - } + // The tooltips need to be re-initialized afterwards: + M.Tooltip.init(document.querySelectorAll('.tooltipped')) + }, + error: function (xhr, textStatus, errorThrown) { + load_modal_generic("error", "Error adding a pre-auth key", "Headscale response: " + JSON.parse(xhr.responseText).message) } }) } @@ -1157,33 +1156,31 @@ function expire_preauth_key(user_name, key) { data: JSON.stringify(data), contentType: "application/json", success: function (response) { - if (response.status == "True") { - // Send the completion toast - M.toast({ html: 'PreAuth expired in ' + user_name }) - // If this is successfull, we should reload the table and close the modal: - var user_data = { "name": user_name } - $.ajax({ - type: "POST", - url: "api/build_preauthkey_table", - data: JSON.stringify(user_data), - contentType: "application/json", - success: function (table_data) { - table = document.getElementById(user_name + '-preauth-keys-collection') - table.innerHTML = table_data - // The tooltips need to be re-initialized afterwards: - M.Tooltip.init(document.querySelectorAll('.tooltipped')) - } - }) - // Get the modal element and close it - modal_element = document.getElementById('card_modal') - M.Modal.getInstance(modal_element).close() + // Send the completion toast + M.toast({ html: 'PreAuth expired in ' + user_name }) + // If this is successful, we should reload the table 
and close the modal: + var user_data = { "user": user_name } + $.ajax({ + type: "POST", + url: "api/build_preauthkey_table", + data: JSON.stringify(user_data), + contentType: "application/json", + success: function (table_data) { + table = document.getElementById(user_name + '-preauth-keys-collection') + table.innerHTML = table_data + // The tooltips need to be re-initialized afterwards: + M.Tooltip.init(document.querySelectorAll('.tooltipped')) + } + }) + // Get the modal element and close it + modal_element = document.getElementById('card_modal') + M.Modal.getInstance(modal_element).close() - // The tooltips need to be re-initialized afterwards: - M.Tooltip.init(document.querySelectorAll('.tooltipped')) - - } else { - load_modal_generic("error", "Error expiring a pre-auth key", "Headscale response: " + JSON.stringify(response.body.message)) - } + // The tooltips need to be re-initialized afterwards: + M.Tooltip.init(document.querySelectorAll('.tooltipped')) + }, + error: function (xhr, textStatus, errorThrown) { + load_modal_generic("error", "Error expiring a pre-auth key", "Headscale response: " + JSON.parse(xhr.responseText).message) } }) } diff --git a/templates/error.html b/templates/error.html index b08eb4c..80eb878 100644 --- a/templates/error.html +++ b/templates/error.html @@ -23,7 +23,7 @@ - +