From 5a633fdd8284dd1a2b6f3c95806f033ef4a4becf Mon Sep 17 00:00:00 2001 From: K900 Date: Tue, 14 Nov 2023 00:40:37 +0300 Subject: Packaging rework (#531) * fix: get rid of title view jank on latest beta * Count the number of installs for each plugin (#557) * Bump aiohttp from 3.8.4 to 3.8.5 in /backend (#558) * fix: include Decky version in request for index.js This avoids the If-Modified-Since logic in aiohttp and ensures Steam doesn't cache old JS, even if the timestamps are normalized. * fix: clean up shellcheck warnings in act runner script * fix: gitignore settings/ * fix: ensure state directories exist when running without the installer * feat: determine root directory correctly when running from in-tree * fix: fix typo in CI script * refactor: build a proper Python package with poetry * refactor: move decky_plugin under the poetry structure There's no need to special case it anymore, just treat it like any other Python module. * sandboxed_plugin: better fix, attempt 2 --------- Co-authored-by: AAGaming Co-authored-by: Party Wumpus <48649272+PartyWumpus@users.noreply.github.com> Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- backend/decky_loader/browser.py | 287 ++++++++ backend/decky_loader/customtypes.py | 6 + backend/decky_loader/helpers.py | 153 ++++ backend/decky_loader/injector.py | 438 +++++++++++ backend/decky_loader/loader.py | 200 +++++ .../decky_loader/localplatform/localplatform.py | 52 ++ .../localplatform/localplatformlinux.py | 201 ++++++ .../decky_loader/localplatform/localplatformwin.py | 55 ++ backend/decky_loader/localplatform/localsocket.py | 145 ++++ backend/decky_loader/main.py | 188 +++++ backend/decky_loader/plugin/method_call_request.py | 29 + backend/decky_loader/plugin/plugin.py | 84 +++ backend/decky_loader/plugin/sandboxed_plugin.py | 138 ++++ backend/decky_loader/settings.py | 60 ++ backend/decky_loader/updater.py | 238 ++++++ backend/decky_loader/utilities.py | 373 ++++++++++ backend/decky_plugin.py | 209 ++++++ backend/decky_plugin.pyi | 173 +++++ backend/main.py | 4 +- backend/poetry.lock | 802 +++++++++++++++++++++ backend/pyinstaller.spec | 30 + backend/pyproject.toml | 36 + backend/pyrightconfig.json | 3 - backend/requirements.txt | 5 - backend/src/browser.py | 275 ------- backend/src/customtypes.py | 6 - backend/src/helpers.py | 153 ---- backend/src/injector.py | 438 ----------- backend/src/loader.py | 200 ----- backend/src/localplatform/localplatform.py | 52 -- backend/src/localplatform/localplatformlinux.py | 192 ----- backend/src/localplatform/localplatformwin.py | 53 -- backend/src/localplatform/localsocket.py | 145 ---- backend/src/main.py | 191 ----- backend/src/plugin/method_call_request.py | 29 - backend/src/plugin/plugin.py | 84 --- backend/src/plugin/sandboxed_plugin.py | 138 ---- backend/src/settings.py | 60 -- backend/src/updater.py | 238 ------ backend/src/utilities.py | 373 ---------- 40 files changed, 3899 insertions(+), 2637 deletions(-) create mode 100644 backend/decky_loader/browser.py create mode 100644 backend/decky_loader/customtypes.py create mode 100644 backend/decky_loader/helpers.py create mode 100644 backend/decky_loader/injector.py create mode 100644 backend/decky_loader/loader.py create mode 100644 backend/decky_loader/localplatform/localplatform.py create mode 100644 backend/decky_loader/localplatform/localplatformlinux.py create mode 100644 backend/decky_loader/localplatform/localplatformwin.py create mode 100644 backend/decky_loader/localplatform/localsocket.py create mode 
100644 backend/decky_loader/main.py create mode 100644 backend/decky_loader/plugin/method_call_request.py create mode 100644 backend/decky_loader/plugin/plugin.py create mode 100644 backend/decky_loader/plugin/sandboxed_plugin.py create mode 100644 backend/decky_loader/settings.py create mode 100644 backend/decky_loader/updater.py create mode 100644 backend/decky_loader/utilities.py create mode 100644 backend/decky_plugin.py create mode 100644 backend/decky_plugin.pyi create mode 100644 backend/poetry.lock create mode 100644 backend/pyinstaller.spec create mode 100644 backend/pyproject.toml delete mode 100644 backend/pyrightconfig.json delete mode 100644 backend/requirements.txt delete mode 100644 backend/src/browser.py delete mode 100644 backend/src/customtypes.py delete mode 100644 backend/src/helpers.py delete mode 100644 backend/src/injector.py delete mode 100644 backend/src/loader.py delete mode 100644 backend/src/localplatform/localplatform.py delete mode 100644 backend/src/localplatform/localplatformlinux.py delete mode 100644 backend/src/localplatform/localplatformwin.py delete mode 100644 backend/src/localplatform/localsocket.py delete mode 100644 backend/src/main.py delete mode 100644 backend/src/plugin/method_call_request.py delete mode 100644 backend/src/plugin/plugin.py delete mode 100644 backend/src/plugin/sandboxed_plugin.py delete mode 100644 backend/src/settings.py delete mode 100644 backend/src/updater.py delete mode 100644 backend/src/utilities.py (limited to 'backend') diff --git a/backend/decky_loader/browser.py b/backend/decky_loader/browser.py new file mode 100644 index 00000000..436e8abf --- /dev/null +++ b/backend/decky_loader/browser.py @@ -0,0 +1,287 @@ +# Full imports +import json +# import pprint +# from pprint import pformat + +# Partial imports +from aiohttp import ClientSession +from asyncio import sleep +from hashlib import sha256 +from io import BytesIO +from logging import getLogger +from os import R_OK, W_OK, path, listdir, access, mkdir +from shutil import rmtree +from time import time +from zipfile import ZipFile +from enum import IntEnum +from typing import Dict, List, TypedDict + +# Local modules +from .localplatform.localplatform import chown, chmod +from .loader import Loader, Plugins +from .helpers import get_ssl_context, download_remote_binary_to_path +from .settings import SettingsManager +from .injector import get_gamepadui_tab + +logger = getLogger("Browser") + +class PluginInstallType(IntEnum): + INSTALL = 0 + REINSTALL = 1 + UPDATE = 2 + +class PluginInstallRequest(TypedDict): + name: str + artifact: str + version: str + hash: str + install_type: PluginInstallType + +class PluginInstallContext: + def __init__(self, artifact: str, name: str, version: str, hash: str) -> None: + self.artifact = artifact + self.name = name + self.version = version + self.hash = hash + +class PluginBrowser: + def __init__(self, plugin_path: str, plugins: Plugins, loader: Loader, settings: SettingsManager) -> None: + self.plugin_path = plugin_path + self.plugins = plugins + self.loader = loader + self.settings = settings + self.install_requests: Dict[str, PluginInstallContext | List[PluginInstallContext]] = {} + + def _unzip_to_plugin_dir(self, zip: BytesIO, name: str, hash: str): + zip_hash = sha256(zip.getbuffer()).hexdigest() + if hash and (zip_hash != hash): + return False + zip_file = ZipFile(zip) + zip_file.extractall(self.plugin_path) + plugin_folder = self.find_plugin_folder(name) + assert plugin_folder is not None + plugin_dir = 
path.join(self.plugin_path, plugin_folder) + + if not chown(plugin_dir) or not chmod(plugin_dir, 555): + logger.error(f"chown/chmod exited with a non-zero exit code") + return False + return True + + async def _download_remote_binaries_for_plugin_with_name(self, pluginBasePath: str): + rv = False + try: + packageJsonPath = path.join(pluginBasePath, 'package.json') + pluginBinPath = path.join(pluginBasePath, 'bin') + + if access(packageJsonPath, R_OK): + with open(packageJsonPath, "r", encoding="utf-8") as f: + packageJson = json.load(f) + if "remote_binary" in packageJson and len(packageJson["remote_binary"]) > 0: + # create bin directory if needed. + chmod(pluginBasePath, 777) + if access(pluginBasePath, W_OK): + if not path.exists(pluginBinPath): + mkdir(pluginBinPath) + if not access(pluginBinPath, W_OK): + chmod(pluginBinPath, 777) + + rv = True + for remoteBinary in packageJson["remote_binary"]: + # Required Fields. If any Remote Binary is missing these fail the install. + binName = remoteBinary["name"] + binURL = remoteBinary["url"] + binHash = remoteBinary["sha256hash"] + if not await download_remote_binary_to_path(binURL, binHash, path.join(pluginBinPath, binName)): + rv = False + raise Exception(f"Error Downloading Remote Binary {binName}@{binURL} with hash {binHash} to {path.join(pluginBinPath, binName)}") + + chown(self.plugin_path) + chmod(pluginBasePath, 555) + else: + rv = True + logger.debug(f"No Remote Binaries to Download") + + except Exception as e: + rv = False + logger.debug(str(e)) + + return rv + + """Return the filename (only) for the specified plugin""" + def find_plugin_folder(self, name: str) -> str | None: + for folder in listdir(self.plugin_path): + try: + with open(path.join(self.plugin_path, folder, 'plugin.json'), "r", encoding="utf-8") as f: + plugin = json.load(f) + + if plugin['name'] == name: + return folder + except: + logger.debug(f"skipping {folder}") + + async def uninstall_plugin(self, name: str): + if self.loader.watcher: + self.loader.watcher.disabled = True + tab = await get_gamepadui_tab() + plugin_folder = self.find_plugin_folder(name) + assert plugin_folder is not None + plugin_dir = path.join(self.plugin_path, plugin_folder) + try: + logger.info("uninstalling " + name) + logger.info(" at dir " + plugin_dir) + logger.debug("calling frontend unload for %s" % str(name)) + res = await tab.evaluate_js(f"DeckyPluginLoader.unloadPlugin('{name}')") + logger.debug("result of unload from UI: %s", res) + # plugins_snapshot = self.plugins.copy() + # snapshot_string = pformat(plugins_snapshot) + # logger.debug("current plugins: %s", snapshot_string) + if name in self.plugins: + logger.debug("Plugin %s was found", name) + self.plugins[name].stop() + logger.debug("Plugin %s was stopped", name) + del self.plugins[name] + logger.debug("Plugin %s was removed from the dictionary", name) + self.cleanup_plugin_settings(name) + logger.debug("removing files %s" % str(name)) + rmtree(plugin_dir) + except FileNotFoundError: + logger.warning(f"Plugin {name} not installed, skipping uninstallation") + except Exception as e: + logger.error(f"Plugin {name} in {plugin_dir} was not uninstalled") + logger.error(f"Error at {str(e)}", exc_info=e) + if self.loader.watcher: + self.loader.watcher.disabled = False + + async def _install(self, artifact: str, name: str, version: str, hash: str): + # Will be set later in code + res_zip = None + + # Check if plugin is installed + isInstalled = False + # Preserve plugin order before removing plugin (uninstall alters the order and 
removes the plugin from the list) + current_plugin_order = self.settings.getSetting("pluginOrder")[:] + if self.loader.watcher: + self.loader.watcher.disabled = True + try: + pluginFolderPath = self.find_plugin_folder(name) + if pluginFolderPath: + isInstalled = True + except: + logger.error(f"Failed to determine if {name} is already installed, continuing anyway.") + + # Check if the file is a local file or a URL + if artifact.startswith("file://"): + logger.info(f"Installing {name} from local ZIP file (Version: {version})") + res_zip = BytesIO(open(artifact[7:], "rb").read()) + else: + logger.info(f"Installing {name} from URL (Version: {version})") + async with ClientSession() as client: + logger.debug(f"Fetching {artifact}") + res = await client.get(artifact, ssl=get_ssl_context()) + if res.status == 200: + logger.debug("Got 200. Reading...") + data = await res.read() + logger.debug(f"Read {len(data)} bytes") + res_zip = BytesIO(data) + else: + logger.fatal(f"Could not fetch from URL. {await res.text()}") + + storeUrl = "" + match self.settings.getSetting("store", 0): + case 0: storeUrl = "https://plugins.deckbrew.xyz/plugins" # default + case 1: storeUrl = "https://testing.deckbrew.xyz/plugins" # testing + case 2: storeUrl = self.settings.getSetting("store-url", "https://plugins.deckbrew.xyz/plugins") # custom + case _: storeUrl = "https://plugins.deckbrew.xyz/plugins" + logger.info(f"Incrementing installs for {name} from URL {storeUrl} (version {version})") + async with ClientSession() as client: + res = await client.post(storeUrl+f"/{name}/versions/{version}/increment?isUpdate={isInstalled}", ssl=get_ssl_context()) + if res.status != 200: + logger.error(f"Server did not accept install count increment request. code: {res.status}") + + # Check to make sure we got the file + if res_zip is None: + logger.fatal(f"Could not fetch {artifact}") + return + + # If plugin is installed, uninstall it + if isInstalled: + try: + logger.debug("Uninstalling existing plugin...") + await self.uninstall_plugin(name) + except: + logger.error(f"Plugin {name} could not be uninstalled.") + + # Install the plugin + logger.debug("Unzipping...") + ret = self._unzip_to_plugin_dir(res_zip, name, hash) + if ret: + plugin_folder = self.find_plugin_folder(name) + assert plugin_folder is not None + plugin_dir = path.join(self.plugin_path, plugin_folder) + ret = await self._download_remote_binaries_for_plugin_with_name(plugin_dir) + if ret: + logger.info(f"Installed {name} (Version: {version})") + if name in self.loader.plugins: + self.loader.plugins[name].stop() + self.loader.plugins.pop(name, None) + await sleep(1) + if not isInstalled: + current_plugin_order = self.settings.getSetting("pluginOrder") + current_plugin_order.append(name) + self.settings.setSetting("pluginOrder", current_plugin_order) + logger.debug("Plugin %s was added to the pluginOrder setting", name) + self.loader.import_plugin(path.join(plugin_dir, "main.py"), plugin_folder) + else: + logger.fatal(f"Failed Downloading Remote Binaries") + else: + logger.fatal(f"SHA-256 Mismatch!!!! 
{name} (Version: {version})") + if self.loader.watcher: + self.loader.watcher.disabled = False + + async def request_plugin_install(self, artifact: str, name: str, version: str, hash: str, install_type: PluginInstallType): + request_id = str(time()) + self.install_requests[request_id] = PluginInstallContext(artifact, name, version, hash) + tab = await get_gamepadui_tab() + await tab.open_websocket() + await tab.evaluate_js(f"DeckyPluginLoader.addPluginInstallPrompt('{name}', '{version}', '{request_id}', '{hash}', {install_type})") + + async def request_multiple_plugin_installs(self, requests: List[PluginInstallRequest]): + request_id = str(time()) + self.install_requests[request_id] = [PluginInstallContext(req['artifact'], req['name'], req['version'], req['hash']) for req in requests] + js_requests_parameter = ','.join([ + f"{{ name: '{req['name']}', version: '{req['version']}', hash: '{req['hash']}', install_type: {req['install_type']}}}" for req in requests + ]) + + tab = await get_gamepadui_tab() + await tab.open_websocket() + await tab.evaluate_js(f"DeckyPluginLoader.addMultiplePluginsInstallPrompt('{request_id}', [{js_requests_parameter}])") + + async def confirm_plugin_install(self, request_id: str): + requestOrRequests = self.install_requests.pop(request_id) + if isinstance(requestOrRequests, list): + [await self._install(req.artifact, req.name, req.version, req.hash) for req in requestOrRequests] + else: + await self._install(requestOrRequests.artifact, requestOrRequests.name, requestOrRequests.version, requestOrRequests.hash) + + def cancel_plugin_install(self, request_id: str): + self.install_requests.pop(request_id) + + def cleanup_plugin_settings(self, name: str): + """Removes any settings related to a plugin. Propably called when a plugin is uninstalled. 
+ + Args: + name (string): The name of the plugin + """ + hidden_plugins = self.settings.getSetting("hiddenPlugins", []) + if name in hidden_plugins: + hidden_plugins.remove(name) + self.settings.setSetting("hiddenPlugins", hidden_plugins) + + + plugin_order = self.settings.getSetting("pluginOrder", []) + + if name in plugin_order: + plugin_order.remove(name) + self.settings.setSetting("pluginOrder", plugin_order) + + logger.debug("Removed any settings for plugin %s", name) diff --git a/backend/decky_loader/customtypes.py b/backend/decky_loader/customtypes.py new file mode 100644 index 00000000..84ebc235 --- /dev/null +++ b/backend/decky_loader/customtypes.py @@ -0,0 +1,6 @@ +from enum import Enum + +class UserType(Enum): + HOST_USER = 1 + EFFECTIVE_USER = 2 + ROOT = 3 \ No newline at end of file diff --git a/backend/decky_loader/helpers.py b/backend/decky_loader/helpers.py new file mode 100644 index 00000000..e3770c63 --- /dev/null +++ b/backend/decky_loader/helpers.py @@ -0,0 +1,153 @@ +import re +import ssl +import uuid +import os +import subprocess +from hashlib import sha256 +from io import BytesIO + +import certifi +from aiohttp.web import Request, Response, middleware +from aiohttp.typedefs import Handler +from aiohttp import ClientSession +from .localplatform import localplatform +from .customtypes import UserType +from logging import getLogger + +REMOTE_DEBUGGER_UNIT = "steam-web-debug-portforward.service" + +# global vars +csrf_token = str(uuid.uuid4()) +ssl_ctx = ssl.create_default_context(cafile=certifi.where()) + +assets_regex = re.compile("^/plugins/.*/assets/.*") +frontend_regex = re.compile("^/frontend/.*") +logger = getLogger("Main") + +def get_ssl_context(): + return ssl_ctx + +def get_csrf_token(): + return csrf_token + +@middleware +async def csrf_middleware(request: Request, handler: Handler): + if str(request.method) == "OPTIONS" or request.headers.get('Authentication') == csrf_token or str(request.rel_url) == "/auth/token" or str(request.rel_url).startswith("/plugins/load_main/") or str(request.rel_url).startswith("/static/") or str(request.rel_url).startswith("/steam_resource/") or str(request.rel_url).startswith("/frontend/") or assets_regex.match(str(request.rel_url)) or frontend_regex.match(str(request.rel_url)): + return await handler(request) + return Response(text='Forbidden', status=403) + +# Get the default homebrew path unless a home_path is specified. 
home_path argument is deprecated +def get_homebrew_path() -> str: + return localplatform.get_unprivileged_path() + +# Recursively create path and chown as user +def mkdir_as_user(path: str): + path = os.path.realpath(path) + os.makedirs(path, exist_ok=True) + localplatform.chown(path) + +# Fetches the version of loader +def get_loader_version() -> str: + try: + with open(os.path.join(os.getcwd(), ".loader.version"), "r", encoding="utf-8") as version_file: + return version_file.readline().strip() + except Exception as e: + logger.warn(f"Failed to execute get_loader_version(): {str(e)}") + return "unknown" + +# returns the appropriate system python paths +def get_system_pythonpaths() -> list[str]: + try: + # run as normal normal user if on linux to also include user python paths + proc = subprocess.run(["python3" if localplatform.ON_LINUX else "python", "-c", "import sys; print('\\n'.join(x for x in sys.path if x))"], + # TODO make this less insane + capture_output=True, user=localplatform.localplatform._get_user_id() if localplatform.ON_LINUX else None, env={} if localplatform.ON_LINUX else None) # type: ignore + return [x.strip() for x in proc.stdout.decode().strip().split("\n")] + except Exception as e: + logger.warn(f"Failed to execute get_system_pythonpaths(): {str(e)}") + return [] + +# Download Remote Binaries to local Plugin +async def download_remote_binary_to_path(url: str, binHash: str, path: str) -> bool: + rv = False + try: + if os.access(os.path.dirname(path), os.W_OK): + async with ClientSession() as client: + res = await client.get(url, ssl=get_ssl_context()) + if res.status == 200: + data = BytesIO(await res.read()) + remoteHash = sha256(data.getbuffer()).hexdigest() + if binHash == remoteHash: + data.seek(0) + with open(path, 'wb') as f: + f.write(data.getbuffer()) + rv = True + else: + raise Exception(f"Fatal Error: Hash Mismatch for remote binary {path}@{url}") + else: + rv = False + except: + rv = False + + return rv + +# Deprecated +def set_user(): + pass + +# Deprecated +def set_user_group() -> str: + return get_user_group() + +######### +# Below is legacy code, provided for backwards compatibility. This will break on windows +######### + +# Get the user id hosting the plugin loader +def get_user_id() -> int: + return localplatform.localplatform._get_user_id() # pyright: ignore [reportPrivateUsage] + +# Get the user hosting the plugin loader +def get_user() -> str: + return localplatform.localplatform._get_user() # pyright: ignore [reportPrivateUsage] + +# Get the effective user id of the running process +def get_effective_user_id() -> int: + return localplatform.localplatform._get_effective_user_id() # pyright: ignore [reportPrivateUsage] + +# Get the effective user of the running process +def get_effective_user() -> str: + return localplatform.localplatform._get_effective_user() # pyright: ignore [reportPrivateUsage] + +# Get the effective user group id of the running process +def get_effective_user_group_id() -> int: + return localplatform.localplatform._get_effective_user_group_id() # pyright: ignore [reportPrivateUsage] + +# Get the effective user group of the running process +def get_effective_user_group() -> str: + return localplatform.localplatform._get_effective_user_group() # pyright: ignore [reportPrivateUsage] + +# Get the user owner of the given file path. 
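The `csrf_middleware` above gates every route behind the token served at `/auth/token`: unless the request path matches one of the whitelisted static/frontend/plugin-asset prefixes, it must carry an `Authentication` header equal to the token or it is answered with 403. A minimal sketch of driving a plugin backend method through that check, assuming the loader is listening on its defaults (`127.0.0.1:1337`) and that a plugin named `example-plugin` exposing an `add` method exists (both hypothetical):

```python
# Minimal sketch of calling a plugin backend method through the CSRF check.
# The plugin name and method are hypothetical; routes, header and response
# shape come from csrf_middleware, get_auth_token and handle_plugin_method_call.
import asyncio
from aiohttp import ClientSession

async def call_plugin_method() -> None:
    async with ClientSession() as session:
        # /auth/token is exempt from the CSRF check and returns the token as plain text
        async with session.get("http://127.0.0.1:1337/auth/token") as res:
            token = await res.text()

        # Every other route must send the token in the Authentication header,
        # otherwise csrf_middleware answers 403 Forbidden.
        async with session.post(
            "http://127.0.0.1:1337/plugins/example-plugin/methods/add",
            headers={"Authentication": token},
            json={"args": {"left": 2, "right": 2}},
        ) as res:
            body = await res.json()
            # handle_plugin_method_call responds with {"success": bool, "result": ...}
            print(body["success"], body["result"])

asyncio.run(call_plugin_method())
```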
+def get_user_owner(file_path: str) -> str: + return localplatform.localplatform._get_user_owner(file_path) # pyright: ignore [reportPrivateUsage] + +# Get the user group of the given file path, or the user group hosting the plugin loader +def get_user_group(file_path: str | None = None) -> str: + return localplatform.localplatform._get_user_group(file_path) # pyright: ignore [reportPrivateUsage] + +# Get the group id of the user hosting the plugin loader +def get_user_group_id() -> int: + return localplatform.localplatform._get_user_group_id() # pyright: ignore [reportPrivateUsage] + +# Get the default home path unless a user is specified +def get_home_path(username: str | None = None) -> str: + return localplatform.get_home_path(UserType.ROOT if username == "root" else UserType.HOST_USER) + +async def is_systemd_unit_active(unit_name: str) -> bool: + return await localplatform.service_active(unit_name) + +async def stop_systemd_unit(unit_name: str) -> bool: + return await localplatform.service_stop(unit_name) + +async def start_systemd_unit(unit_name: str) -> bool: + return await localplatform.service_start(unit_name) diff --git a/backend/decky_loader/injector.py b/backend/decky_loader/injector.py new file mode 100644 index 00000000..a217f689 --- /dev/null +++ b/backend/decky_loader/injector.py @@ -0,0 +1,438 @@ +# Injector code from https://github.com/SteamDeckHomebrew/steamdeck-ui-inject. More info on how it works there. + +from asyncio import sleep +from logging import getLogger +from typing import Any, Callable, List, TypedDict, Dict + +from aiohttp import ClientSession +from aiohttp.client_exceptions import ClientConnectorError, ClientOSError +from asyncio.exceptions import TimeoutError +import uuid + +BASE_ADDRESS = "http://localhost:8080" + +logger = getLogger("Injector") + +class _TabResponse(TypedDict): + title: str + id: str + url: str + webSocketDebuggerUrl: str + +class Tab: + cmd_id = 0 + + def __init__(self, res: _TabResponse) -> None: + self.title: str = res["title"] + self.id: str = res["id"] + self.url: str = res["url"] + self.ws_url: str = res["webSocketDebuggerUrl"] + + self.websocket = None + self.client = None + + async def open_websocket(self): + self.client = ClientSession() + self.websocket = await self.client.ws_connect(self.ws_url) # type: ignore + + async def close_websocket(self): + if self.websocket: + await self.websocket.close() + if self.client: + await self.client.close() + + async def listen_for_message(self): + if self.websocket: + async for message in self.websocket: + data = message.json() + yield data + logger.warn(f"The Tab {self.title} socket has been disconnected while listening for messages.") + await self.close_websocket() + + async def _send_devtools_cmd(self, dc: Dict[str, Any], receive: bool = True): + if self.websocket: + self.cmd_id += 1 + dc["id"] = self.cmd_id + await self.websocket.send_json(dc) + if receive: + async for msg in self.listen_for_message(): + if "id" in msg and msg["id"] == dc["id"]: + return msg + return None + raise RuntimeError("Websocket not opened") + + async def evaluate_js(self, js: str, run_async: bool | None = False, manage_socket: bool | None = True, get_result: bool = True): + try: + if manage_socket: + await self.open_websocket() + + res = await self._send_devtools_cmd({ + "method": "Runtime.evaluate", + "params": { + "expression": js, + "userGesture": True, + "awaitPromise": run_async + } + }, get_result) + + finally: + if manage_socket: + await self.close_websocket() + return res + + async def 
has_global_var(self, var_name: str, manage_socket: bool = True): + res = await self.evaluate_js(f"window['{var_name}'] !== null && window['{var_name}'] !== undefined", False, manage_socket) + assert res is not None + + if not "result" in res or not "result" in res["result"] or not "value" in res["result"]["result"]: + return False + + return res["result"]["result"]["value"] + + async def close(self, manage_socket: bool = True): + try: + if manage_socket: + await self.open_websocket() + + res = await self._send_devtools_cmd({ + "method": "Page.close", + }, False) + + finally: + if manage_socket: + await self.close_websocket() + return res + + async def enable(self): + """ + Enables page domain notifications. + """ + await self._send_devtools_cmd({ + "method": "Page.enable", + }, False) + + async def disable(self): + """ + Disables page domain notifications. + """ + await self._send_devtools_cmd({ + "method": "Page.disable", + }, False) + + async def refresh(self, manage_socket: bool = True): + try: + if manage_socket: + await self.open_websocket() + + await self._send_devtools_cmd({ + "method": "Page.reload", + }, False) + + finally: + if manage_socket: + await self.close_websocket() + + return + async def reload_and_evaluate(self, js: str, manage_socket: bool = True): + """ + Reloads the current tab, with JS to run on load via debugger + """ + try: + if manage_socket: + await self.open_websocket() + + await self._send_devtools_cmd({ + "method": "Debugger.enable" + }, True) + + await self._send_devtools_cmd({ + "method": "Runtime.evaluate", + "params": { + "expression": "location.reload();", + "userGesture": True, + "awaitPromise": False + } + }, False) + + breakpoint_res = await self._send_devtools_cmd({ + "method": "Debugger.setInstrumentationBreakpoint", + "params": { + "instrumentation": "beforeScriptExecution" + } + }, True) + + assert breakpoint_res is not None + + logger.info(breakpoint_res) + + # Page finishes loading when breakpoint hits + + for _ in range(20): + # this works around 1/5 of the time, so just send it 8 times. + # the js accounts for being injected multiple times allowing only one instance to run at a time anyway + await self._send_devtools_cmd({ + "method": "Runtime.evaluate", + "params": { + "expression": js, + "userGesture": True, + "awaitPromise": False + } + }, False) + + await self._send_devtools_cmd({ + "method": "Debugger.removeBreakpoint", + "params": { + "breakpointId": breakpoint_res["result"]["breakpointId"] + } + }, False) + + for _ in range(4): + await self._send_devtools_cmd({ + "method": "Debugger.resume" + }, False) + + await self._send_devtools_cmd({ + "method": "Debugger.disable" + }, True) + + finally: + if manage_socket: + await self.close_websocket() + return + + async def add_script_to_evaluate_on_new_document(self, js: str, add_dom_wrapper: bool = True, manage_socket: bool = True, get_result: bool = True): + """ + How the underlying call functions is not particularly clear from the devtools docs, so stealing puppeteer's description: + + Adds a function which would be invoked in one of the following scenarios: + * whenever the page is navigated + * whenever the child frame is attached or navigated. In this case, the + function is invoked in the context of the newly attached frame. + + The function is invoked after the document was created but before any of + its scripts were run. This is useful to amend the JavaScript environment, + e.g. to seed `Math.random`. 
+ + Parameters + ---------- + js : str + The script to evaluate on new document + add_dom_wrapper : bool + True to wrap the script in a wait for the 'DOMContentLoaded' event. + DOM will usually not exist when this execution happens, + so it is necessary to delay til DOM is loaded if you are modifying it + manage_socket : bool + True to have this function handle opening/closing the websocket for this tab + get_result : bool + True to wait for the result of this call + + Returns + ------- + int or None + The identifier of the script added, used to remove it later. + (see remove_script_to_evaluate_on_new_document below) + None is returned if `get_result` is False + """ + try: + + wrappedjs = """ + function scriptFunc() { + {js} + } + if (document.readyState === 'loading') { + addEventListener('DOMContentLoaded', () => { + scriptFunc(); + }); + } else { + scriptFunc(); + } + """.format(js=js) if add_dom_wrapper else js + + if manage_socket: + await self.open_websocket() + + res = await self._send_devtools_cmd({ + "method": "Page.addScriptToEvaluateOnNewDocument", + "params": { + "source": wrappedjs + } + }, get_result) + + finally: + if manage_socket: + await self.close_websocket() + return res + + async def remove_script_to_evaluate_on_new_document(self, script_id: str, manage_socket: bool = True): + """ + Removes a script from a page that was added with `add_script_to_evaluate_on_new_document` + + Parameters + ---------- + script_id : int + The identifier of the script to remove (returned from `add_script_to_evaluate_on_new_document`) + """ + + try: + if manage_socket: + await self.open_websocket() + + await self._send_devtools_cmd({ + "method": "Page.removeScriptToEvaluateOnNewDocument", + "params": { + "identifier": script_id + } + }, False) + + finally: + if manage_socket: + await self.close_websocket() + + async def has_element(self, element_name: str, manage_socket: bool = True): + res = await self.evaluate_js(f"document.getElementById('{element_name}') != null", False, manage_socket) + assert res is not None + + if not "result" in res or not "result" in res["result"] or not "value" in res["result"]["result"]: + return False + + return res["result"]["result"]["value"] + + async def inject_css(self, style: str, manage_socket: bool = True): + try: + css_id = str(uuid.uuid4()) + + result = await self.evaluate_js( + f""" + (function() {{ + const style = document.createElement('style'); + style.id = "{css_id}"; + document.head.append(style); + style.textContent = `{style}`; + }})() + """, False, manage_socket) + + assert result is not None + + if "exceptionDetails" in result["result"]: + return { + "success": False, + "result": result["result"] + } + + return { + "success": True, + "result": css_id + } + except Exception as e: + return { + "success": False, + "result": e + } + + async def remove_css(self, css_id: str, manage_socket: bool = True): + try: + result = await self.evaluate_js( + f""" + (function() {{ + let style = document.getElementById("{css_id}"); + + if (style.nodeName.toLowerCase() == 'style') + style.parentNode.removeChild(style); + }})() + """, False, manage_socket) + + assert result is not None + + if "exceptionDetails" in result["result"]: + return { + "success": False, + "result": result + } + + return { + "success": True + } + except Exception as e: + return { + "success": False, + "result": e + } + + async def get_steam_resource(self, url: str): + res = await self.evaluate_js(f'(async function test() {{ return await (await fetch("{url}")).text() }})()', True) + assert 
res is not None + return res["result"]["result"]["value"] + + def __repr__(self): + return self.title + + +async def get_tabs() -> List[Tab]: + res = {} + + na = False + while True: + try: + async with ClientSession() as web: + res = await web.get(f"{BASE_ADDRESS}/json", timeout=3) + except ClientConnectorError: + if not na: + logger.debug("Steam isn't available yet. Wait for a moment...") + na = True + await sleep(5) + except ClientOSError: + logger.warn(f"The request to {BASE_ADDRESS}/json was reset") + await sleep(1) + except TimeoutError: + logger.warn(f"The request to {BASE_ADDRESS}/json timed out") + await sleep(1) + else: + break + + if res.status == 200: + r = await res.json() + return [Tab(i) for i in r] + else: + raise Exception(f"/json did not return 200. {await res.text()}") + + +async def get_tab(tab_name: str) -> Tab: + tabs = await get_tabs() + tab = next((i for i in tabs if i.title == tab_name), None) + if not tab: + raise ValueError(f"Tab {tab_name} not found") + return tab + +async def get_tab_lambda(test: Callable[[Tab], bool]) -> Tab: + tabs = await get_tabs() + tab = next((i for i in tabs if test(i)), None) + if not tab: + raise ValueError(f"Tab not found by lambda") + return tab + +SHARED_CTX_NAMES = ["SharedJSContext", "Steam Shared Context presented by Valveā„¢", "Steam", "SP"] +CLOSEABLE_URLS = ["about:blank", "data:text/html,%3Cbody%3E%3C%2Fbody%3E"] # Closing anything other than these *really* likes to crash Steam +DO_NOT_CLOSE_URL = "Valve Steam Gamepad/default" # Steam Big Picture Mode tab + +def tab_is_gamepadui(t: Tab) -> bool: + return "https://steamloopback.host/routes/" in t.url and t.title in SHARED_CTX_NAMES + +async def get_gamepadui_tab() -> Tab: + tabs = await get_tabs() + tab = next((i for i in tabs if tab_is_gamepadui(i)), None) + if not tab: + raise ValueError(f"GamepadUI Tab not found") + return tab + +async def inject_to_tab(tab_name: str, js: str, run_async: bool = False): + tab = await get_tab(tab_name) + + return await tab.evaluate_js(js, run_async) + +async def close_old_tabs(): + tabs = await get_tabs() + for t in tabs: + if not t.title or (t.title not in SHARED_CTX_NAMES and any(url in t.url for url in CLOSEABLE_URLS) and DO_NOT_CLOSE_URL not in t.url): + logger.debug("Closing tab: " + getattr(t, "title", "Untitled")) + await t.close() + await sleep(0.5) diff --git a/backend/decky_loader/loader.py b/backend/decky_loader/loader.py new file mode 100644 index 00000000..7567912c --- /dev/null +++ b/backend/decky_loader/loader.py @@ -0,0 +1,200 @@ +from __future__ import annotations +from asyncio import AbstractEventLoop, Queue, sleep +from json.decoder import JSONDecodeError +from logging import getLogger +from os import listdir, path +from pathlib import Path +from traceback import print_exc +from typing import Any, Tuple + +from aiohttp import web +from os.path import exists +from watchdog.events import RegexMatchingEventHandler, DirCreatedEvent, DirModifiedEvent, FileCreatedEvent, FileModifiedEvent # type: ignore +from watchdog.observers import Observer # type: ignore + +from typing import TYPE_CHECKING +if TYPE_CHECKING: + from .main import PluginManager + +from .injector import get_gamepadui_tab +from .plugin.plugin import PluginWrapper + +Plugins = dict[str, PluginWrapper] +ReloadQueue = Queue[Tuple[str, str, bool | None] | Tuple[str, str]] + +#TODO: Remove placeholder method +async def log_plugin_emitted_message(message: Any): + getLogger().debug(f"EMITTED MESSAGE: " + str(message)) + +class FileChangeHandler(RegexMatchingEventHandler): 
+ def __init__(self, queue: ReloadQueue, plugin_path: str) -> None: + super().__init__(regexes=[r'^.*?dist\/index\.js$', r'^.*?main\.py$']) # type: ignore + self.logger = getLogger("file-watcher") + self.plugin_path = plugin_path + self.queue = queue + self.disabled = True + + def maybe_reload(self, src_path: str): + if self.disabled: + return + plugin_dir = Path(path.relpath(src_path, self.plugin_path)).parts[0] + if exists(path.join(self.plugin_path, plugin_dir, "plugin.json")): + self.queue.put_nowait((path.join(self.plugin_path, plugin_dir, "main.py"), plugin_dir, True)) + + def on_created(self, event: DirCreatedEvent | FileCreatedEvent): + src_path = event.src_path + if "__pycache__" in src_path: + return + + # check to make sure this isn't a directory + if path.isdir(src_path): + return + + # get the directory name of the plugin so that we can find its "main.py" and reload it; the + # file that changed is not necessarily the one that needs to be reloaded + self.logger.debug(f"file created: {src_path}") + self.maybe_reload(src_path) + + def on_modified(self, event: DirModifiedEvent | FileModifiedEvent): + src_path = event.src_path + if "__pycache__" in src_path: + return + + # check to make sure this isn't a directory + if path.isdir(src_path): + return + + # get the directory name of the plugin so that we can find its "main.py" and reload it; the + # file that changed is not necessarily the one that needs to be reloaded + self.logger.debug(f"file modified: {src_path}") + self.maybe_reload(src_path) + +class Loader: + def __init__(self, server_instance: PluginManager, plugin_path: str, loop: AbstractEventLoop, live_reload: bool = False) -> None: + self.loop = loop + self.logger = getLogger("Loader") + self.plugin_path = plugin_path + self.logger.info(f"plugin_path: {self.plugin_path}") + self.plugins: Plugins = {} + self.watcher = None + self.live_reload = live_reload + self.reload_queue: ReloadQueue = Queue() + self.loop.create_task(self.handle_reloads()) + + if live_reload: + self.observer = Observer() + self.watcher = FileChangeHandler(self.reload_queue, plugin_path) + self.observer.schedule(self.watcher, self.plugin_path, recursive=True) # type: ignore + self.observer.start() + self.loop.create_task(self.enable_reload_wait()) + + server_instance.web_app.add_routes([ + web.get("/frontend/{path:.*}", self.handle_frontend_assets), + web.get("/locales/{path:.*}", self.handle_frontend_locales), + web.get("/plugins", self.get_plugins), + web.get("/plugins/{plugin_name}/frontend_bundle", self.handle_frontend_bundle), + web.post("/plugins/{plugin_name}/methods/{method_name}", self.handle_plugin_method_call), + web.get("/plugins/{plugin_name}/assets/{path:.*}", self.handle_plugin_frontend_assets), + web.post("/plugins/{plugin_name}/reload", self.handle_backend_reload_request) + ]) + + async def enable_reload_wait(self): + if self.live_reload: + await sleep(10) + if self.watcher: + self.logger.info("Hot reload enabled") + self.watcher.disabled = False + + async def handle_frontend_assets(self, request: web.Request): + file = Path(__file__).parents[1].joinpath("static").joinpath(request.match_info["path"]) + return web.FileResponse(file, headers={"Cache-Control": "no-cache"}) + + async def handle_frontend_locales(self, request: web.Request): + req_lang = request.match_info["path"] + file = Path(__file__).parents[1].joinpath("locales").joinpath(req_lang) + if exists(file): + return web.FileResponse(file, headers={"Cache-Control": "no-cache", "Content-Type": "application/json"}) + else: + 
self.logger.info(f"Language {req_lang} not available, returning an empty dictionary") + return web.json_response(data={}, headers={"Cache-Control": "no-cache"}) + + async def get_plugins(self, request: web.Request): + plugins = list(self.plugins.values()) + return web.json_response([{"name": str(i), "version": i.version} for i in plugins]) + + async def handle_plugin_frontend_assets(self, request: web.Request): + plugin = self.plugins[request.match_info["plugin_name"]] + file = path.join(self.plugin_path, plugin.plugin_directory, "dist/assets", request.match_info["path"]) + + return web.FileResponse(file, headers={"Cache-Control": "no-cache"}) + + async def handle_frontend_bundle(self, request: web.Request): + plugin = self.plugins[request.match_info["plugin_name"]] + + with open(path.join(self.plugin_path, plugin.plugin_directory, "dist/index.js"), "r", encoding="utf-8") as bundle: + return web.Response(text=bundle.read(), content_type="application/javascript") + + def import_plugin(self, file: str, plugin_directory: str, refresh: bool | None = False, batch: bool | None = False): + try: + plugin = PluginWrapper(file, plugin_directory, self.plugin_path) + if plugin.name in self.plugins: + if not "debug" in plugin.flags and refresh: + self.logger.info(f"Plugin {plugin.name} is already loaded and has requested to not be re-loaded") + return + else: + self.plugins[plugin.name].stop() + self.plugins.pop(plugin.name, None) + if plugin.passive: + self.logger.info(f"Plugin {plugin.name} is passive") + self.plugins[plugin.name] = plugin.start() + self.plugins[plugin.name].set_emitted_message_callback(log_plugin_emitted_message) + self.logger.info(f"Loaded {plugin.name}") + if not batch: + self.loop.create_task(self.dispatch_plugin(plugin.name, plugin.version)) + except Exception as e: + self.logger.error(f"Could not load {file}. 
{e}") + print_exc() + + async def dispatch_plugin(self, name: str, version: str | None): + gpui_tab = await get_gamepadui_tab() + await gpui_tab.evaluate_js(f"window.importDeckyPlugin('{name}', '{version}')") + + def import_plugins(self): + self.logger.info(f"import plugins from {self.plugin_path}") + + directories = [i for i in listdir(self.plugin_path) if path.isdir(path.join(self.plugin_path, i)) and path.isfile(path.join(self.plugin_path, i, "plugin.json"))] + for directory in directories: + self.logger.info(f"found plugin: {directory}") + self.import_plugin(path.join(self.plugin_path, directory, "main.py"), directory, False, True) + + async def handle_reloads(self): + while True: + args = await self.reload_queue.get() + self.import_plugin(*args) # type: ignore + + async def handle_plugin_method_call(self, request: web.Request): + res = {} + plugin = self.plugins[request.match_info["plugin_name"]] + method_name = request.match_info["method_name"] + try: + method_info = await request.json() + args: Any = method_info["args"] + except JSONDecodeError: + args = {} + try: + if method_name.startswith("_"): + raise RuntimeError("Tried to call private method") + res["result"] = await plugin.execute_method(method_name, args) + res["success"] = True + except Exception as e: + res["result"] = str(e) + res["success"] = False + return web.json_response(res) + + async def handle_backend_reload_request(self, request: web.Request): + plugin_name : str = request.match_info["plugin_name"] + plugin = self.plugins[plugin_name] + + await self.reload_queue.put((plugin.file, plugin.plugin_directory)) + + return web.Response(status=200) \ No newline at end of file diff --git a/backend/decky_loader/localplatform/localplatform.py b/backend/decky_loader/localplatform/localplatform.py new file mode 100644 index 00000000..028eff8f --- /dev/null +++ b/backend/decky_loader/localplatform/localplatform.py @@ -0,0 +1,52 @@ +import platform, os + +ON_WINDOWS = platform.system() == "Windows" +ON_LINUX = not ON_WINDOWS + +if ON_WINDOWS: + from .localplatformwin import * + from . import localplatformwin as localplatform +else: + from .localplatformlinux import * + from . import localplatformlinux as localplatform + +def get_privileged_path() -> str: + '''Get path accessible by elevated user. Holds plugins, decky loader and decky loader configs''' + return localplatform.get_privileged_path() + +def get_unprivileged_path() -> str: + '''Get path accessible by non-elevated user. Holds plugin configuration, plugin data and plugin logs. 
Externally referred to as the 'Homebrew' directory''' + return localplatform.get_unprivileged_path() + +def get_unprivileged_user() -> str: + '''Get user that should own files made in unprivileged path''' + return localplatform.get_unprivileged_user() + +def get_chown_plugin_path() -> bool: + return os.getenv("CHOWN_PLUGIN_PATH", "1") == "1" + +def get_server_host() -> str: + return os.getenv("SERVER_HOST", "127.0.0.1") + +def get_server_port() -> int: + return int(os.getenv("SERVER_PORT", "1337")) + +def get_live_reload() -> bool: + return os.getenv("LIVE_RELOAD", "1") == "1" + +def get_keep_systemd_service() -> bool: + return os.getenv("KEEP_SYSTEMD_SERVICE", "0") == "1" + +def get_log_level() -> int: + return {"CRITICAL": 50, "ERROR": 40, "WARNING": 30, "INFO": 20, "DEBUG": 10}[ + os.getenv("LOG_LEVEL", "INFO") + ] + +def get_selinux() -> bool: + if ON_LINUX: + from subprocess import check_output + try: + if (check_output("getenforce").decode("ascii").strip("\n") == "Enforcing"): return True + except FileNotFoundError: + pass + return False diff --git a/backend/decky_loader/localplatform/localplatformlinux.py b/backend/decky_loader/localplatform/localplatformlinux.py new file mode 100644 index 00000000..d5bea6ab --- /dev/null +++ b/backend/decky_loader/localplatform/localplatformlinux.py @@ -0,0 +1,201 @@ +import os, pwd, grp, sys, logging +from subprocess import call, run, DEVNULL, PIPE, STDOUT +from ..customtypes import UserType + +logger = logging.getLogger("localplatform") + +# Get the user id hosting the plugin loader +def _get_user_id() -> int: + return pwd.getpwnam(_get_user()).pw_uid + +# Get the user hosting the plugin loader +def _get_user() -> str: + return get_unprivileged_user() + +# Get the effective user id of the running process +def _get_effective_user_id() -> int: + return os.geteuid() + +# Get the effective user of the running process +def _get_effective_user() -> str: + return pwd.getpwuid(_get_effective_user_id()).pw_name + +# Get the effective user group id of the running process +def _get_effective_user_group_id() -> int: + return os.getegid() + +# Get the effective user group of the running process +def _get_effective_user_group() -> str: + return grp.getgrgid(_get_effective_user_group_id()).gr_name + +# Get the user owner of the given file path. 
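Everything `localplatform.py` exposes above is driven by environment variables, which is what the no-installer and in-tree scenarios in this PR rely on: if nothing is set on Linux, `get_unprivileged_path()` walks up from the loader binary (or from this source file) and finally falls back to `/home/deck/homebrew`. A hedged sketch of configuring a from-source run explicitly through those variables — the paths and user are illustrative, and `decky_loader.main.main()` is assumed to be importable (e.g. after `poetry install`):

```python
# Hedged sketch: configuring a from-source run purely through the environment
# variables read by localplatform.py. Paths and user are illustrative only.
import os

os.environ.update({
    "UNPRIVILEGED_PATH": "/home/deck/homebrew",   # plugin config/data/logs ("Homebrew" dir)
    "PRIVILEGED_PATH": "/home/deck/homebrew",     # plugins, loader and loader settings
    "UNPRIVILEGED_USER": "deck",                  # owner of files under the unprivileged path
    "LOG_LEVEL": "DEBUG",                         # CRITICAL/ERROR/WARNING/INFO/DEBUG
    "LIVE_RELOAD": "1",                           # enable the plugin file watcher / hot reload
    "SERVER_HOST": "127.0.0.1",
    "SERVER_PORT": "1337",
    "CHOWN_PLUGIN_PATH": "0",                     # skip chown/chmod of the plugin directory
})

# Assumes the decky_loader package is importable (e.g. installed with poetry).
from decky_loader.main import main

main()
```

Note that the variables have to be in place before the import: `decky_loader.main` joins `get_privileged_path()` with `plugins` and optionally chowns it at module load time.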
+def _get_user_owner(file_path: str) -> str: + return pwd.getpwuid(os.stat(file_path).st_uid).pw_name + +# Get the user group of the given file path, or the user group hosting the plugin loader +def _get_user_group(file_path: str | None = None) -> str: + return grp.getgrgid(os.stat(file_path).st_gid if file_path is not None else _get_user_group_id()).gr_name + +# Get the group id of the user hosting the plugin loader +def _get_user_group_id() -> int: + return pwd.getpwuid(_get_user_id()).pw_gid + +def chown(path : str, user : UserType = UserType.HOST_USER, recursive : bool = True) -> bool: + user_str = "" + + if user == UserType.HOST_USER: + user_str = _get_user()+":"+_get_user_group() + elif user == UserType.EFFECTIVE_USER: + user_str = _get_effective_user()+":"+_get_effective_user_group() + elif user == UserType.ROOT: + user_str = "root:root" + else: + raise Exception("Unknown User Type") + + result = call(["chown", "-R", user_str, path] if recursive else ["chown", user_str, path]) + return result == 0 + +def chmod(path : str, permissions : int, recursive : bool = True) -> bool: + if _get_effective_user_id() != 0: + return True + result = call(["chmod", "-R", str(permissions), path] if recursive else ["chmod", str(permissions), path]) + return result == 0 + +def folder_owner(path : str) -> UserType|None: + user_owner = _get_user_owner(path) + + if (user_owner == _get_user()): + return UserType.HOST_USER + + elif (user_owner == _get_effective_user()): + return UserType.EFFECTIVE_USER + + else: + return None + +def get_home_path(user : UserType = UserType.HOST_USER) -> str: + user_name = "root" + + if user == UserType.HOST_USER: + user_name = _get_user() + elif user == UserType.EFFECTIVE_USER: + user_name = _get_effective_user() + elif user == UserType.ROOT: + pass + else: + raise Exception("Unknown User Type") + + return pwd.getpwnam(user_name).pw_dir + +def get_username() -> str: + return _get_user() + +def setgid(user : UserType = UserType.HOST_USER): + user_id = 0 + + if user == UserType.HOST_USER: + user_id = _get_user_group_id() + elif user == UserType.ROOT: + pass + else: + raise Exception("Unknown user type") + + os.setgid(user_id) + +def setuid(user : UserType = UserType.HOST_USER): + user_id = 0 + + if user == UserType.HOST_USER: + user_id = _get_user_id() + elif user == UserType.ROOT: + pass + else: + raise Exception("Unknown user type") + + os.setuid(user_id) + +async def service_active(service_name : str) -> bool: + res = run(["systemctl", "is-active", service_name], stdout=DEVNULL, stderr=DEVNULL) + return res.returncode == 0 + +async def service_restart(service_name : str) -> bool: + call(["systemctl", "daemon-reload"]) + cmd = ["systemctl", "restart", service_name] + res = run(cmd, stdout=PIPE, stderr=STDOUT) + return res.returncode == 0 + +async def service_stop(service_name : str) -> bool: + cmd = ["systemctl", "stop", service_name] + res = run(cmd, stdout=PIPE, stderr=STDOUT) + return res.returncode == 0 + +async def service_start(service_name : str) -> bool: + cmd = ["systemctl", "start", service_name] + res = run(cmd, stdout=PIPE, stderr=STDOUT) + return res.returncode == 0 + +def get_privileged_path() -> str: + path = os.getenv("PRIVILEGED_PATH") + + if path == None: + path = get_unprivileged_path() + + os.makedirs(path, exist_ok=True) + + return path + +def _parent_dir(path : str | None) -> str | None: + if path == None: + return None + + if path.endswith('/'): + path = path[:-1] + + return os.path.dirname(path) + +def get_unprivileged_path() -> str: + path = 
os.getenv("UNPRIVILEGED_PATH") + + if path == None: + path = _parent_dir(os.getenv("PLUGIN_PATH")) + + if path == None: + logger.debug("Unprivileged path is not properly configured. Making something up!") + + if hasattr(sys, 'frozen'): + # Expected path of loader binary is /home/deck/homebrew/service/PluginLoader + path = _parent_dir(_parent_dir(os.path.realpath(sys.argv[0]))) + else: + # Expected path of this file is $src_root/backend/src/localplatformlinux.py + path = _parent_dir(_parent_dir(_parent_dir(__file__))) + + if path != None and not os.path.exists(path): + path = None + + if path == None: + logger.warn("Unprivileged path is not properly configured. Defaulting to /home/deck/homebrew") + path = "/home/deck/homebrew" # We give up + + os.makedirs(path, exist_ok=True) + + return path + + +def get_unprivileged_user() -> str: + user = os.getenv("UNPRIVILEGED_USER") + + if user == None: + # Lets hope we can extract it from the unprivileged dir + dir = os.path.realpath(get_unprivileged_path()) + + pws = sorted(pwd.getpwall(), reverse=True, key=lambda pw: len(pw.pw_dir)) + for pw in pws: + if dir.startswith(os.path.realpath(pw.pw_dir)): + user = pw.pw_name + break + + if user == None: + logger.warn("Unprivileged user is not properly configured. Defaulting to 'deck'") + user = 'deck' + + return user diff --git a/backend/decky_loader/localplatform/localplatformwin.py b/backend/decky_loader/localplatform/localplatformwin.py new file mode 100644 index 00000000..f1a5be17 --- /dev/null +++ b/backend/decky_loader/localplatform/localplatformwin.py @@ -0,0 +1,55 @@ +from ..customtypes import UserType +import os, sys + +def chown(path : str, user : UserType = UserType.HOST_USER, recursive : bool = True) -> bool: + return True # Stubbed + +def chmod(path : str, permissions : int, recursive : bool = True) -> bool: + return True # Stubbed + +def folder_owner(path : str) -> UserType|None: + return UserType.HOST_USER # Stubbed + +def get_home_path(user : UserType = UserType.HOST_USER) -> str: + return os.path.expanduser("~") # Mostly stubbed + +def setgid(user : UserType = UserType.HOST_USER): + pass # Stubbed + +def setuid(user : UserType = UserType.HOST_USER): + pass # Stubbed + +async def service_active(service_name : str) -> bool: + return True # Stubbed + +async def service_stop(service_name : str) -> bool: + return True # Stubbed + +async def service_start(service_name : str) -> bool: + return True # Stubbed + +async def service_restart(service_name : str) -> bool: + if service_name == "plugin_loader": + sys.exit(42) + + return True # Stubbed + +def get_username() -> str: + return os.getlogin() + +def get_privileged_path() -> str: + '''On windows, privileged_path is equal to unprivileged_path''' + return get_unprivileged_path() + +def get_unprivileged_path() -> str: + path = os.getenv("UNPRIVILEGED_PATH") + + if path == None: + path = os.getenv("PRIVILEGED_PATH", os.path.join(os.path.expanduser("~"), "homebrew")) + + os.makedirs(path, exist_ok=True) + + return path + +def get_unprivileged_user() -> str: + return os.getenv("UNPRIVILEGED_USER", os.getlogin()) diff --git a/backend/decky_loader/localplatform/localsocket.py b/backend/decky_loader/localplatform/localsocket.py new file mode 100644 index 00000000..93b1ea18 --- /dev/null +++ b/backend/decky_loader/localplatform/localsocket.py @@ -0,0 +1,145 @@ +import asyncio, time +from typing import Any, Callable, Coroutine +import random + +from .localplatform import ON_WINDOWS + +BUFFER_LIMIT = 2 ** 20 # 1 MiB + +class UnixSocket: + def __init__(self, 
on_new_message: Callable[[str], Coroutine[Any, Any, Any]]): + ''' + on_new_message takes 1 string argument. + It's return value gets used, if not None, to write data to the socket. + Method should be async + ''' + self.socket_addr = f"/tmp/plugin_socket_{time.time()}" + self.on_new_message = on_new_message + self.socket = None + self.reader = None + self.writer = None + self.server_writer = None + + async def setup_server(self): + self.socket = await asyncio.start_unix_server(self._listen_for_method_call, path=self.socket_addr, limit=BUFFER_LIMIT) + + async def _open_socket_if_not_exists(self): + if not self.reader: + retries = 0 + while retries < 10: + try: + self.reader, self.writer = await asyncio.open_unix_connection(self.socket_addr, limit=BUFFER_LIMIT) + return True + except: + await asyncio.sleep(2) + retries += 1 + return False + else: + return True + + async def get_socket_connection(self): + if not await self._open_socket_if_not_exists(): + return None, None + + return self.reader, self.writer + + async def close_socket_connection(self): + if self.writer != None: + self.writer.close() + + self.reader = None + + async def read_single_line(self) -> str|None: + reader, _ = await self.get_socket_connection() + + try: + assert reader + except AssertionError: + return + + return await self._read_single_line(reader) + + async def write_single_line(self, message : str): + _, writer = await self.get_socket_connection() + + try: + assert writer + except AssertionError: + return + + await self._write_single_line(writer, message) + + async def _read_single_line(self, reader: asyncio.StreamReader) -> str: + line = bytearray() + while True: + try: + line.extend(await reader.readuntil()) + except asyncio.LimitOverrunError: + line.extend(await reader.read(reader._limit)) # type: ignore + continue + except asyncio.IncompleteReadError as err: + line.extend(err.partial) + break + else: + break + + return line.decode("utf-8") + + async def _write_single_line(self, writer: asyncio.StreamWriter, message : str): + if not message.endswith("\n"): + message += "\n" + + writer.write(message.encode("utf-8")) + await writer.drain() + + async def write_single_line_server(self, message: str): + if self.server_writer is None: + return + await self._write_single_line(self.server_writer, message) + + async def _listen_for_method_call(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter): + self.server_writer = writer + while True: + + def _(task: asyncio.Task[str|None]): + res = task.result() + if res is not None: + asyncio.create_task(self._write_single_line(writer, res)) + + line = await self._read_single_line(reader) + asyncio.create_task(self.on_new_message(line)).add_done_callback(_) + +class PortSocket (UnixSocket): + def __init__(self, on_new_message: Callable[[str], Coroutine[Any, Any, Any]]): + ''' + on_new_message takes 1 string argument. + It's return value gets used, if not None, to write data to the socket. 
+ Method should be async + ''' + super().__init__(on_new_message) + self.host = "127.0.0.1" + self.port = random.sample(range(40000, 60000), 1)[0] + + async def setup_server(self): + self.socket = await asyncio.start_server(self._listen_for_method_call, host=self.host, port=self.port, limit=BUFFER_LIMIT) + + async def _open_socket_if_not_exists(self): + if not self.reader: + retries = 0 + while retries < 10: + try: + self.reader, self.writer = await asyncio.open_connection(host=self.host, port=self.port, limit=BUFFER_LIMIT) + return True + except: + await asyncio.sleep(2) + retries += 1 + return False + else: + return True + +if ON_WINDOWS: + class LocalSocket (PortSocket): # type: ignore + pass +else: + class LocalSocket (UnixSocket): + pass \ No newline at end of file diff --git a/backend/decky_loader/main.py b/backend/decky_loader/main.py new file mode 100644 index 00000000..fae30574 --- /dev/null +++ b/backend/decky_loader/main.py @@ -0,0 +1,188 @@ +# Change PyInstaller files permissions +import sys +from typing import Dict +from .localplatform.localplatform import (chmod, chown, service_stop, service_start, + ON_WINDOWS, get_log_level, get_live_reload, + get_server_port, get_server_host, get_chown_plugin_path, + get_privileged_path) +if hasattr(sys, '_MEIPASS'): + chmod(sys._MEIPASS, 755) # type: ignore +# Full imports +from asyncio import AbstractEventLoop, new_event_loop, set_event_loop, sleep +from logging import basicConfig, getLogger +from os import path +from traceback import format_exc +import multiprocessing + +import aiohttp_cors # type: ignore +# Partial imports +from aiohttp import client_exceptions +from aiohttp.web import Application, Response, Request, get, run_app, static # type: ignore +from aiohttp_jinja2 import setup as jinja_setup + +# local modules +from .browser import PluginBrowser +from .helpers import (REMOTE_DEBUGGER_UNIT, csrf_middleware, get_csrf_token, get_loader_version, + mkdir_as_user, get_system_pythonpaths, get_effective_user_id) + +from .injector import get_gamepadui_tab, Tab, close_old_tabs +from .loader import Loader +from .settings import SettingsManager +from .updater import Updater +from .utilities import Utilities +from .customtypes import UserType + + +basicConfig( + level=get_log_level(), + format="[%(module)s][%(levelname)s]: %(message)s" +) + +logger = getLogger("Main") +plugin_path = path.join(get_privileged_path(), "plugins") + +def chown_plugin_dir(): + if not path.exists(plugin_path): # For safety, create the folder before attempting to do anything with it + mkdir_as_user(plugin_path) + + if not chown(plugin_path, UserType.HOST_USER) or not chmod(plugin_path, 555): + logger.error(f"chown/chmod exited with a non-zero exit code") + +if get_chown_plugin_path() == True: + chown_plugin_dir() + +class PluginManager: + def __init__(self, loop: AbstractEventLoop) -> None: + self.loop = loop + self.web_app = Application() + self.web_app.middlewares.append(csrf_middleware) + self.cors = aiohttp_cors.setup(self.web_app, defaults={ + "https://steamloopback.host": aiohttp_cors.ResourceOptions( + expose_headers="*", + allow_headers="*", + allow_credentials=True + ) + }) + self.plugin_loader = Loader(self, plugin_path, self.loop, get_live_reload()) + self.settings = SettingsManager("loader", path.join(get_privileged_path(), "settings")) + self.plugin_browser = PluginBrowser(plugin_path, self.plugin_loader.plugins, self.plugin_loader, self.settings) + self.utilities = Utilities(self) + self.updater = Updater(self) + + jinja_setup(self.web_app) + + async 
def startup(_: Application): + if self.settings.getSetting("cef_forward", False): + self.loop.create_task(service_start(REMOTE_DEBUGGER_UNIT)) + else: + self.loop.create_task(service_stop(REMOTE_DEBUGGER_UNIT)) + self.loop.create_task(self.loader_reinjector()) + self.loop.create_task(self.load_plugins()) + + self.web_app.on_startup.append(startup) + + self.loop.set_exception_handler(self.exception_handler) + self.web_app.add_routes([get("/auth/token", self.get_auth_token)]) + + for route in list(self.web_app.router.routes()): + self.cors.add(route) # type: ignore + self.web_app.add_routes([static("/static", path.join(path.dirname(__file__), '..', 'static'))]) + + def exception_handler(self, loop: AbstractEventLoop, context: Dict[str, str]): + if context["message"] == "Unclosed connection": + return + loop.default_exception_handler(context) + + async def get_auth_token(self, request: Request): + return Response(text=get_csrf_token()) + + async def load_plugins(self): + # await self.wait_for_server() + logger.debug("Loading plugins") + self.plugin_loader.import_plugins() + # await inject_to_tab("SP", "window.syncDeckyPlugins();") + if self.settings.getSetting("pluginOrder", None) == None: + self.settings.setSetting("pluginOrder", list(self.plugin_loader.plugins.keys())) + logger.debug("Did not find pluginOrder setting, set it to default") + + async def loader_reinjector(self): + while True: + tab = None + nf = False + dc = False + while not tab: + try: + tab = await get_gamepadui_tab() + except (client_exceptions.ClientConnectorError, client_exceptions.ServerDisconnectedError): + if not dc: + logger.debug("Couldn't connect to debugger, waiting...") + dc = True + pass + except ValueError: + if not nf: + logger.debug("Couldn't find GamepadUI tab, waiting...") + nf = True + pass + if not tab: + await sleep(5) + await tab.open_websocket() + await tab.enable() + await self.inject_javascript(tab, True) + try: + async for msg in tab.listen_for_message(): + # this gets spammed a lot + if msg.get("method", None) != "Page.navigatedWithinDocument": + logger.debug("Page event: " + str(msg.get("method", None))) + if msg.get("method", None) == "Page.domContentEventFired": + if not await tab.has_global_var("deckyHasLoaded", False): + await self.inject_javascript(tab) + if msg.get("method", None) == "Inspector.detached": + logger.info("CEF has requested that we detach.") + await tab.close_websocket() + break + # If this is a forceful disconnect the loop will just stop without any failure message. In this case, injector.py will handle this for us so we don't need to close the socket. + # This is because of https://github.com/aio-libs/aiohttp/blob/3ee7091b40a1bc58a8d7846e7878a77640e96996/aiohttp/client_ws.py#L321 + logger.info("CEF has disconnected...") + # At this point the loop starts again and we connect to the freshly started Steam client once it is ready. 
+ except Exception: + logger.error("Exception while reading page events " + format_exc()) + await tab.close_websocket() + pass + # while True: + # await sleep(5) + # if not await tab.has_global_var("deckyHasLoaded", False): + # logger.info("Plugin loader isn't present in Steam anymore, reinjecting...") + # await self.inject_javascript(tab) + + async def inject_javascript(self, tab: Tab, first: bool=False, request: Request|None=None): + logger.info("Loading Decky frontend!") + try: + if first: + if await tab.has_global_var("deckyHasLoaded", False): + await close_old_tabs() + await tab.evaluate_js("try{if (window.deckyHasLoaded){setTimeout(() => location.reload(), 100)}else{window.deckyHasLoaded = true;(async()=>{try{while(!window.SP_REACT){await new Promise(r => setTimeout(r, 10))};await import('http://localhost:1337/frontend/index.js?v=%s')}catch(e){console.error(e)};})();}}catch(e){console.error(e)}" % (get_loader_version(), ), False, False, False) + except: + logger.info("Failed to inject JavaScript into tab\n" + format_exc()) + pass + + def run(self): + return run_app(self.web_app, host=get_server_host(), port=get_server_port(), loop=self.loop, access_log=None) + +def main(): + if ON_WINDOWS: + # Fix windows/flask not recognising that .js means 'application/javascript' + import mimetypes + mimetypes.add_type('application/javascript', '.js') + + # Required for multiprocessing support in frozen files + multiprocessing.freeze_support() + else: + if get_effective_user_id() != 0: + logger.warning(f"decky is running as an unprivileged user, this is not officially supported and may cause issues") + + # Append the system and user python paths + sys.path.extend(get_system_pythonpaths()) + + loop = new_event_loop() + set_event_loop(loop) + PluginManager(loop).run() diff --git a/backend/decky_loader/plugin/method_call_request.py b/backend/decky_loader/plugin/method_call_request.py new file mode 100644 index 00000000..cebe34f8 --- /dev/null +++ b/backend/decky_loader/plugin/method_call_request.py @@ -0,0 +1,29 @@ +from typing import Any, TypedDict +from uuid import uuid4 +from asyncio import Event + +class SocketResponseDict(TypedDict): + id: str + success: bool + res: Any + +class MethodCallResponse: + def __init__(self, success: bool, result: Any) -> None: + self.success = success + self.result = result + +class MethodCallRequest: + def __init__(self) -> None: + self.id = str(uuid4()) + self.event = Event() + self.response: MethodCallResponse + + def set_result(self, dc: SocketResponseDict): + self.response = MethodCallResponse(dc["success"], dc["res"]) + self.event.set() + + async def wait_for_result(self): + await self.event.wait() + if not self.response.success: + raise Exception(self.response.result) + return self.response.result \ No newline at end of file diff --git a/backend/decky_loader/plugin/plugin.py b/backend/decky_loader/plugin/plugin.py new file mode 100644 index 00000000..6c338106 --- /dev/null +++ b/backend/decky_loader/plugin/plugin.py @@ -0,0 +1,84 @@ +from asyncio import Task, create_task +from json import dumps, load, loads +from logging import getLogger +from os import path +from multiprocessing import Process + +from .sandboxed_plugin import SandboxedPlugin +from .method_call_request import MethodCallRequest +from ..localplatform.localsocket import LocalSocket + +from typing import Any, Callable, Coroutine, Dict + +class PluginWrapper: + def __init__(self, file: str, plugin_directory: str, plugin_path: str) -> None: + self.file = file + self.plugin_path = plugin_path + 
self.plugin_directory = plugin_directory + + self.version = None + + json = load(open(path.join(plugin_path, plugin_directory, "plugin.json"), "r", encoding="utf-8")) + if path.isfile(path.join(plugin_path, plugin_directory, "package.json")): + package_json = load(open(path.join(plugin_path, plugin_directory, "package.json"), "r", encoding="utf-8")) + self.version = package_json["version"] + + self.name = json["name"] + self.author = json["author"] + self.flags = json["flags"] + + self.passive = not path.isfile(self.file) + + self.log = getLogger("plugin") + + self.sandboxed_plugin = SandboxedPlugin(self.name, self.passive, self.flags, self.file, self.plugin_directory, self.plugin_path, self.version, self.author) + #TODO: Maybe make LocalSocket not require on_new_message to make this cleaner + self._socket = LocalSocket(self.sandboxed_plugin.on_new_message) + self._listener_task: Task[Any] + self._method_call_requests: Dict[str, MethodCallRequest] = {} + + self.emitted_message_callback: Callable[[Dict[Any, Any]], Coroutine[Any, Any, Any]] + + def __str__(self) -> str: + return self.name + + async def _response_listener(self): + while True: + try: + line = await self._socket.read_single_line() + if line != None: + res = loads(line) + if res["id"] == "0": + create_task(self.emitted_message_callback(res["payload"])) + else: + self._method_call_requests.pop(res["id"]).set_result(res) + except: + pass + + def set_emitted_message_callback(self, callback: Callable[[Dict[Any, Any]], Coroutine[Any, Any, Any]]): + self.emitted_message_callback = callback + + async def execute_method(self, method_name: str, kwargs: Dict[Any, Any]): + if self.passive: + raise RuntimeError("This plugin is passive (aka does not implement main.py)") + + request = MethodCallRequest() + await self._socket.get_socket_connection() + await self._socket.write_single_line(dumps({ "method": method_name, "args": kwargs, "id": request.id }, ensure_ascii=False)) + self._method_call_requests[request.id] = request + + return await request.wait_for_result() + + def start(self): + if self.passive: + return self + Process(target=self.sandboxed_plugin.initialize, args=[self._socket]).start() + self._listener_task = create_task(self._response_listener()) + return self + + def stop(self): + self._listener_task.cancel() + async def _(self: PluginWrapper): + await self._socket.write_single_line(dumps({ "stop": True }, ensure_ascii=False)) + await self._socket.close_socket_connection() + create_task(_(self)) \ No newline at end of file diff --git a/backend/decky_loader/plugin/sandboxed_plugin.py b/backend/decky_loader/plugin/sandboxed_plugin.py new file mode 100644 index 00000000..6be97b4a --- /dev/null +++ b/backend/decky_loader/plugin/sandboxed_plugin.py @@ -0,0 +1,138 @@ +from os import path, environ +from signal import SIGINT, signal +from importlib.util import module_from_spec, spec_from_file_location +from json import dumps, loads +from logging import getLogger +from sys import exit, path as syspath, modules as sysmodules +from traceback import format_exc +from asyncio import (get_event_loop, new_event_loop, + set_event_loop, sleep) + +from .method_call_request import SocketResponseDict +from ..localplatform.localsocket import LocalSocket +from ..localplatform.localplatform import setgid, setuid, get_username, get_home_path +from ..customtypes import UserType +from .. 
import helpers + +from typing import Any, Dict, List + +class SandboxedPlugin: + def __init__(self, + name: str, + passive: bool, + flags: List[str], + file: str, + plugin_directory: str, + plugin_path: str, + version: str|None, + author: str) -> None: + self.name = name + self.passive = passive + self.flags = flags + self.file = file + self.plugin_path = plugin_path + self.plugin_directory = plugin_directory + self.version = version + self.author = author + + self.log = getLogger("plugin") + + def initialize(self, socket: LocalSocket): + self._socket = socket + + try: + signal(SIGINT, lambda s, f: exit(0)) + + set_event_loop(new_event_loop()) + if self.passive: + return + setgid(UserType.ROOT if "root" in self.flags else UserType.HOST_USER) + setuid(UserType.ROOT if "root" in self.flags else UserType.HOST_USER) + # export a bunch of environment variables to help plugin developers + environ["HOME"] = get_home_path(UserType.ROOT if "root" in self.flags else UserType.HOST_USER) + environ["USER"] = "root" if "root" in self.flags else get_username() + environ["DECKY_VERSION"] = helpers.get_loader_version() + environ["DECKY_USER"] = get_username() + environ["DECKY_USER_HOME"] = helpers.get_home_path() + environ["DECKY_HOME"] = helpers.get_homebrew_path() + environ["DECKY_PLUGIN_SETTINGS_DIR"] = path.join(environ["DECKY_HOME"], "settings", self.plugin_directory) + helpers.mkdir_as_user(path.join(environ["DECKY_HOME"], "settings")) + helpers.mkdir_as_user(environ["DECKY_PLUGIN_SETTINGS_DIR"]) + environ["DECKY_PLUGIN_RUNTIME_DIR"] = path.join(environ["DECKY_HOME"], "data", self.plugin_directory) + helpers.mkdir_as_user(path.join(environ["DECKY_HOME"], "data")) + helpers.mkdir_as_user(environ["DECKY_PLUGIN_RUNTIME_DIR"]) + environ["DECKY_PLUGIN_LOG_DIR"] = path.join(environ["DECKY_HOME"], "logs", self.plugin_directory) + helpers.mkdir_as_user(path.join(environ["DECKY_HOME"], "logs")) + helpers.mkdir_as_user(environ["DECKY_PLUGIN_LOG_DIR"]) + environ["DECKY_PLUGIN_DIR"] = path.join(self.plugin_path, self.plugin_directory) + environ["DECKY_PLUGIN_NAME"] = self.name + if self.version: + environ["DECKY_PLUGIN_VERSION"] = self.version + environ["DECKY_PLUGIN_AUTHOR"] = self.author + + # append the plugin's `py_modules` to the recognized python paths + syspath.append(path.join(environ["DECKY_PLUGIN_DIR"], "py_modules")) + + #TODO: FIX IN A LESS CURSED WAY + keys = [key for key in sysmodules if key.startswith("decky_loader.")] + for key in keys: + sysmodules[key.replace("decky_loader.", "")] = sysmodules[key] + + spec = spec_from_file_location("_", self.file) + assert spec is not None + module = module_from_spec(spec) + assert spec.loader is not None + spec.loader.exec_module(module) + self.Plugin = module.Plugin + + setattr(self.Plugin, "emit_message", self.emit_message) + #TODO: Find how to put emit_message on global namespace so it doesn't pollute Plugin + + if hasattr(self.Plugin, "_migration"): + get_event_loop().run_until_complete(self.Plugin._migration(self.Plugin)) + if hasattr(self.Plugin, "_main"): + get_event_loop().create_task(self.Plugin._main(self.Plugin)) + get_event_loop().create_task(socket.setup_server()) + get_event_loop().run_forever() + except: + self.log.error("Failed to start " + self.name + "!\n" + format_exc()) + exit(0) + + async def _unload(self): + try: + self.log.info("Attempting to unload with plugin " + self.name + "'s \"_unload\" function.\n") + if hasattr(self.Plugin, "_unload"): + await self.Plugin._unload(self.Plugin) + self.log.info("Unloaded " + self.name + "\n") + 
else: + self.log.info("Could not find \"_unload\" in " + self.name + "'s main.py" + "\n") + except: + self.log.error("Failed to unload " + self.name + "!\n" + format_exc()) + exit(0) + + async def on_new_message(self, message : str) -> str|None: + data = loads(message) + + if "stop" in data: + self.log.info("Calling Loader unload function.") + await self._unload() + get_event_loop().stop() + while get_event_loop().is_running(): + await sleep(0) + get_event_loop().close() + raise Exception("Closing message listener") + + d: SocketResponseDict = {"res": None, "success": True, "id": data["id"]} + try: + d["res"] = await getattr(self.Plugin, data["method"])(self.Plugin, **data["args"]) + except Exception as e: + d["res"] = str(e) + d["success"] = False + finally: + return dumps(d, ensure_ascii=False) + + async def emit_message(self, message: Dict[Any, Any]): + await self._socket.write_single_line_server(dumps({ + "id": "0", + "payload": message + })) \ No newline at end of file diff --git a/backend/decky_loader/settings.py b/backend/decky_loader/settings.py new file mode 100644 index 00000000..c0f2b90c --- /dev/null +++ b/backend/decky_loader/settings.py @@ -0,0 +1,60 @@ +from json import dump, load +from os import mkdir, path, listdir, rename +from typing import Any, Dict +from .localplatform.localplatform import chown, folder_owner, get_chown_plugin_path +from .customtypes import UserType + +from .helpers import get_homebrew_path + + +class SettingsManager: + def __init__(self, name: str, settings_directory: str | None = None) -> None: + wrong_dir = get_homebrew_path() + if settings_directory == None: + settings_directory = path.join(wrong_dir, "settings") + + self.path = path.join(settings_directory, name + ".json") + + #Create the folder with the correct permission + if not path.exists(settings_directory): + mkdir(settings_directory) + + #Copy all old settings file in the root directory to the correct folder + for file in listdir(wrong_dir): + if file.endswith(".json"): + rename(path.join(wrong_dir,file), + path.join(settings_directory, file)) + self.path = path.join(settings_directory, name + ".json") + + + #If the owner of the settings directory is not the user, then set it as the user: + expected_user = UserType.HOST_USER if get_chown_plugin_path() else UserType.ROOT + if folder_owner(settings_directory) != expected_user: + chown(settings_directory, expected_user, False) + + self.settings: Dict[str, Any] = {} + + try: + open(self.path, "x", encoding="utf-8") + except FileExistsError as _: + self.read() + pass + + def read(self): + try: + with open(self.path, "r", encoding="utf-8") as file: + self.settings = load(file) + except Exception as e: + print(e) + pass + + def commit(self): + with open(self.path, "w+", encoding="utf-8") as file: + dump(self.settings, file, indent=4, ensure_ascii=False) + + def getSetting(self, key: str, default: Any = None) -> Any: + return self.settings.get(key, default) + + def setSetting(self, key: str, value: Any) -> Any: + self.settings[key] = value + self.commit() diff --git a/backend/decky_loader/updater.py b/backend/decky_loader/updater.py new file mode 100644 index 00000000..f8aef429 --- /dev/null +++ b/backend/decky_loader/updater.py @@ -0,0 +1,238 @@ +from __future__ import annotations +import os +import shutil +from asyncio import sleep +from json.decoder import JSONDecodeError +from logging import getLogger +from os import getcwd, path, remove +from typing import TYPE_CHECKING, List, TypedDict +if TYPE_CHECKING: + from .main import PluginManager 
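+# PluginManager is imported only for static type checking; importing it at runtime
+# would create a circular import, since .main itself imports Updater from this module.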
+from .localplatform.localplatform import chmod, service_restart, ON_LINUX, get_keep_systemd_service, get_selinux + +from aiohttp import ClientSession, web + +from . import helpers +from .injector import get_gamepadui_tab +from .settings import SettingsManager + +logger = getLogger("Updater") + +class RemoteVerAsset(TypedDict): + name: str + browser_download_url: str +class RemoteVer(TypedDict): + tag_name: str + prerelease: bool + assets: List[RemoteVerAsset] + +class Updater: + def __init__(self, context: PluginManager) -> None: + self.context = context + self.settings = self.context.settings + # Exposes updater methods to frontend + self.updater_methods = { + "get_branch": self._get_branch, + "get_version": self.get_version, + "do_update": self.do_update, + "do_restart": self.do_restart, + "check_for_updates": self.check_for_updates + } + self.remoteVer: RemoteVer | None = None + self.allRemoteVers: List[RemoteVer] = [] + self.localVer = helpers.get_loader_version() + + try: + self.currentBranch = self.get_branch(self.context.settings) + except: + self.currentBranch = 0 + logger.error("Current branch could not be determined, defaulting to \"Stable\"") + + if context: + context.web_app.add_routes([ + web.post("/updater/{method_name}", self._handle_server_method_call) + ]) + context.loop.create_task(self.version_reloader()) + + async def _handle_server_method_call(self, request: web.Request): + method_name = request.match_info["method_name"] + try: + args = await request.json() + except JSONDecodeError: + args = {} + res = {} + try: + r = await self.updater_methods[method_name](**args) # type: ignore + res["result"] = r + res["success"] = True + except Exception as e: + res["result"] = str(e) + res["success"] = False + return web.json_response(res) + + def get_branch(self, manager: SettingsManager): + ver = manager.getSetting("branch", -1) + logger.debug("current branch: %i" % ver) + if ver == -1: + logger.info("Current branch is not set, determining branch from version...") + if self.localVer.startswith("v") and "-pre" in self.localVer: + logger.info("Current version determined to be pre-release") + manager.setSetting('branch', 1) + return 1 + else: + logger.info("Current version determined to be stable") + manager.setSetting('branch', 0) + return 0 + return ver + + async def _get_branch(self, manager: SettingsManager): + return self.get_branch(manager) + + # retrieve relevant service file's url for each branch + def get_service_url(self): + logger.debug("Getting service URL") + branch = self.get_branch(self.context.settings) + match branch: + case 0: + url = "https://raw.githubusercontent.com/SteamDeckHomebrew/decky-loader/main/dist/plugin_loader-release.service" + case 1 | 2: + url = "https://raw.githubusercontent.com/SteamDeckHomebrew/decky-loader/main/dist/plugin_loader-prerelease.service" + case _: + logger.error("You have an invalid branch set... 
Defaulting to prerelease service, please send the logs to the devs!") + url = "https://raw.githubusercontent.com/SteamDeckHomebrew/decky-loader/main/dist/plugin_loader-prerelease.service" + return str(url) + + async def get_version(self): + return { + "current": self.localVer, + "remote": self.remoteVer, + "all": self.allRemoteVers, + "updatable": self.localVer != "unknown" + } + + async def check_for_updates(self): + logger.debug("checking for updates") + selectedBranch = self.get_branch(self.context.settings) + async with ClientSession() as web: + async with web.request("GET", "https://api.github.com/repos/SteamDeckHomebrew/decky-loader/releases", ssl=helpers.get_ssl_context()) as res: + remoteVersions: List[RemoteVer] = await res.json() + if selectedBranch == 0: + logger.debug("release type: release") + remoteVersions = list(filter(lambda ver: ver["tag_name"].startswith("v") and not ver["prerelease"] and not ver["tag_name"].find("-pre") > 0 and ver["tag_name"], remoteVersions)) + elif selectedBranch == 1: + logger.debug("release type: pre-release") + remoteVersions = list(filter(lambda ver:ver["tag_name"].startswith("v"), remoteVersions)) + else: + logger.error("release type: NOT FOUND") + raise ValueError("no valid branch found") + self.allRemoteVers = remoteVersions + logger.debug("determining release type to find, branch is %i" % selectedBranch) + if selectedBranch == 0: + logger.debug("release type: release") + self.remoteVer = next(filter(lambda ver: ver["tag_name"].startswith("v") and not ver["prerelease"] and not ver["tag_name"].find("-pre") > 0 and ver["tag_name"], remoteVersions), None) + elif selectedBranch == 1: + logger.debug("release type: pre-release") + self.remoteVer = next(filter(lambda ver:ver["tag_name"].startswith("v"), remoteVersions), None) + else: + logger.error("release type: NOT FOUND") + raise ValueError("no valid branch found") + logger.info("Updated remote version information") + tab = await get_gamepadui_tab() + await tab.evaluate_js(f"window.DeckyPluginLoader.notifyUpdates()", False, True, False) + return await self.get_version() + + async def version_reloader(self): + await sleep(30) + while True: + try: + await self.check_for_updates() + except: + pass + await sleep(60 * 60 * 6) # 6 hours + + async def do_update(self): + logger.debug("Starting update.") + try: + assert self.remoteVer + except AssertionError: + logger.error("Unable to update as remoteVer is missing") + return + + version = self.remoteVer["tag_name"] + download_url = None + download_filename = "PluginLoader" if ON_LINUX else "PluginLoader.exe" + download_temp_filename = download_filename + ".new" + + for x in self.remoteVer["assets"]: + if x["name"] == download_filename: + download_url = x["browser_download_url"] + break + + if download_url == None: + raise Exception("Download url not found") + + service_url = self.get_service_url() + logger.debug("Retrieved service URL") + + tab = await get_gamepadui_tab() + await tab.open_websocket() + async with ClientSession() as web: + if ON_LINUX and not get_keep_systemd_service(): + logger.debug("Downloading systemd service") + # download the relevant systemd service depending upon branch + async with web.request("GET", service_url, ssl=helpers.get_ssl_context(), allow_redirects=True) as res: + logger.debug("Downloading service file") + data = await res.content.read() + logger.debug(str(data)) + service_file_path = path.join(getcwd(), "plugin_loader.service") + try: + with open(path.join(getcwd(), "plugin_loader.service"), "wb") as out: + 
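+ # Stage the downloaded unit file in the working directory; the ${HOMEBREW_FOLDER} placeholder is substituted below before the file is copied to /etc/systemd/system.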
out.write(data) + except Exception as e: + logger.error(f"Error at %s", exc_info=e) + with open(path.join(getcwd(), "plugin_loader.service"), "r", encoding="utf-8") as service_file: + service_data = service_file.read() + service_data = service_data.replace("${HOMEBREW_FOLDER}", helpers.get_homebrew_path()) + with open(path.join(getcwd(), "plugin_loader.service"), "w", encoding="utf-8") as service_file: + service_file.write(service_data) + + logger.debug("Saved service file") + logger.debug("Copying service file over current file.") + shutil.copy(service_file_path, "/etc/systemd/system/plugin_loader.service") + if not os.path.exists(path.join(getcwd(), ".systemd")): + os.mkdir(path.join(getcwd(), ".systemd")) + shutil.move(service_file_path, path.join(getcwd(), ".systemd")+"/plugin_loader.service") + + logger.debug("Downloading binary") + async with web.request("GET", download_url, ssl=helpers.get_ssl_context(), allow_redirects=True) as res: + total = int(res.headers.get('content-length', 0)) + with open(path.join(getcwd(), download_temp_filename), "wb") as out: + progress = 0 + raw = 0 + async for c in res.content.iter_chunked(512): + out.write(c) + raw += len(c) + new_progress = round((raw / total) * 100) + if progress != new_progress: + self.context.loop.create_task(tab.evaluate_js(f"window.DeckyUpdater.updateProgress({new_progress})", False, False, False)) + progress = new_progress + + with open(path.join(getcwd(), ".loader.version"), "w", encoding="utf-8") as out: + out.write(version) + + if ON_LINUX: + remove(path.join(getcwd(), download_filename)) + shutil.move(path.join(getcwd(), download_temp_filename), path.join(getcwd(), download_filename)) + chmod(path.join(getcwd(), download_filename), 777, False) + if get_selinux(): + from asyncio.subprocess import create_subprocess_exec + process = await create_subprocess_exec("chcon", "-t", "bin_t", path.join(getcwd(), download_filename)) + logger.info(f"Setting the executable flag with chcon returned {await process.wait()}") + + logger.info("Updated loader installation.") + await tab.evaluate_js("window.DeckyUpdater.finish()", False, False) + await self.do_restart() + await tab.close_websocket() + + async def do_restart(self): + await service_restart("plugin_loader") diff --git a/backend/decky_loader/utilities.py b/backend/decky_loader/utilities.py new file mode 100644 index 00000000..f04ed371 --- /dev/null +++ b/backend/decky_loader/utilities.py @@ -0,0 +1,373 @@ +from __future__ import annotations +from os import stat_result +import uuid +from json.decoder import JSONDecodeError +from os.path import splitext +import re +from traceback import format_exc +from stat import FILE_ATTRIBUTE_HIDDEN # type: ignore + +from asyncio import StreamReader, StreamWriter, start_server, gather, open_connection +from aiohttp import ClientSession, web +from typing import TYPE_CHECKING, Callable, Coroutine, Dict, Any, List, TypedDict + +from logging import getLogger +from pathlib import Path + +from .browser import PluginInstallRequest, PluginInstallType +if TYPE_CHECKING: + from .main import PluginManager +from .injector import inject_to_tab, get_gamepadui_tab, close_old_tabs, get_tab +from .localplatform.localplatform import ON_WINDOWS +from . 
import helpers +from .localplatform.localplatform import service_stop, service_start, get_home_path, get_username + +class FilePickerObj(TypedDict): + file: Path + filest: stat_result + is_dir: bool + +class Utilities: + def __init__(self, context: PluginManager) -> None: + self.context = context + self.util_methods: Dict[str, Callable[..., Coroutine[Any, Any, Any]]] = { + "ping": self.ping, + "http_request": self.http_request, + "install_plugin": self.install_plugin, + "install_plugins": self.install_plugins, + "cancel_plugin_install": self.cancel_plugin_install, + "confirm_plugin_install": self.confirm_plugin_install, + "uninstall_plugin": self.uninstall_plugin, + "execute_in_tab": self.execute_in_tab, + "inject_css_into_tab": self.inject_css_into_tab, + "remove_css_from_tab": self.remove_css_from_tab, + "allow_remote_debugging": self.allow_remote_debugging, + "disallow_remote_debugging": self.disallow_remote_debugging, + "set_setting": self.set_setting, + "get_setting": self.get_setting, + "filepicker_ls": self.filepicker_ls, + "disable_rdt": self.disable_rdt, + "enable_rdt": self.enable_rdt, + "get_tab_id": self.get_tab_id, + "get_user_info": self.get_user_info, + } + + self.logger = getLogger("Utilities") + + self.rdt_proxy_server = None + self.rdt_script_id = None + self.rdt_proxy_task = None + + if context: + context.web_app.add_routes([ + web.post("/methods/{method_name}", self._handle_server_method_call) + ]) + + async def _handle_server_method_call(self, request: web.Request): + method_name = request.match_info["method_name"] + try: + args = await request.json() + except JSONDecodeError: + args = {} + res = {} + try: + r = await self.util_methods[method_name](**args) + res["result"] = r + res["success"] = True + except Exception as e: + res["result"] = str(e) + res["success"] = False + return web.json_response(res) + + async def install_plugin(self, artifact: str="", name: str="No name", version: str="dev", hash: str="", install_type: PluginInstallType=PluginInstallType.INSTALL): + return await self.context.plugin_browser.request_plugin_install( + artifact=artifact, + name=name, + version=version, + hash=hash, + install_type=install_type + ) + + async def install_plugins(self, requests: List[PluginInstallRequest]): + return await self.context.plugin_browser.request_multiple_plugin_installs( + requests=requests + ) + + async def confirm_plugin_install(self, request_id: str): + return await self.context.plugin_browser.confirm_plugin_install(request_id) + + async def cancel_plugin_install(self, request_id: str): + return self.context.plugin_browser.cancel_plugin_install(request_id) + + async def uninstall_plugin(self, name: str): + return await self.context.plugin_browser.uninstall_plugin(name) + + async def http_request(self, method: str="", url: str="", **kwargs: Any): + async with ClientSession() as web: + res = await web.request(method, url, ssl=helpers.get_ssl_context(), **kwargs) + text = await res.text() + return { + "status": res.status, + "headers": dict(res.headers), + "body": text + } + + async def ping(self, **kwargs: Any): + return "pong" + + async def execute_in_tab(self, tab: str, run_async: bool, code: str): + try: + result = await inject_to_tab(tab, code, run_async) + assert result + if "exceptionDetails" in result["result"]: + return { + "success": False, + "result": result["result"] + } + + return { + "success": True, + "result": result["result"]["result"].get("value") + } + except Exception as e: + return { + "success": False, + "result": e + } + + async def 
inject_css_into_tab(self, tab: str, style: str): + try: + css_id = str(uuid.uuid4()) + + result = await inject_to_tab(tab, + f""" + (function() {{ + const style = document.createElement('style'); + style.id = "{css_id}"; + document.head.append(style); + style.textContent = `{style}`; + }})() + """, False) + + if result and "exceptionDetails" in result["result"]: + return { + "success": False, + "result": result["result"] + } + + return { + "success": True, + "result": css_id + } + except Exception as e: + return { + "success": False, + "result": e + } + + async def remove_css_from_tab(self, tab: str, css_id: str): + try: + result = await inject_to_tab(tab, + f""" + (function() {{ + let style = document.getElementById("{css_id}"); + + if (style.nodeName.toLowerCase() == 'style') + style.parentNode.removeChild(style); + }})() + """, False) + + if result and "exceptionDetails" in result["result"]: + return { + "success": False, + "result": result + } + + return { + "success": True + } + except Exception as e: + return { + "success": False, + "result": e + } + + async def get_setting(self, key: str, default: Any): + return self.context.settings.getSetting(key, default) + + async def set_setting(self, key: str, value: Any): + return self.context.settings.setSetting(key, value) + + async def allow_remote_debugging(self): + await service_start(helpers.REMOTE_DEBUGGER_UNIT) + return True + + async def disallow_remote_debugging(self): + await service_stop(helpers.REMOTE_DEBUGGER_UNIT) + return True + + async def filepicker_ls(self, + path : str | None = None, + include_files: bool = True, + include_folders: bool = True, + include_ext: list[str] = [], + include_hidden: bool = False, + order_by: str = "name_asc", + filter_for: str | None = None, + page: int = 1, + max: int = 1000): + + if path == None: + path = get_home_path() + + path_obj = Path(path).resolve() + + files: List[FilePickerObj] = [] + folders: List[FilePickerObj] = [] + + #Resolving all files/folders in the requested directory + for file in path_obj.iterdir(): + if file.exists(): + filest = file.stat() + is_hidden = file.name.startswith('.') + if ON_WINDOWS and not is_hidden: + is_hidden = bool(filest.st_file_attributes & FILE_ATTRIBUTE_HIDDEN) # type: ignore + if include_folders and file.is_dir(): + if (is_hidden and include_hidden) or not is_hidden: + folders.append({"file": file, "filest": filest, "is_dir": True}) + elif include_files: + # Handle requested extensions if present + if len(include_ext) == 0 or 'all_files' in include_ext \ + or splitext(file.name)[1].lstrip('.') in include_ext: + if (is_hidden and include_hidden) or not is_hidden: + files.append({"file": file, "filest": filest, "is_dir": False}) + # Filter logic + if filter_for is not None: + try: + if re.compile(filter_for): + files = list(filter(lambda file: re.search(filter_for, file["file"].name) != None, files)) + except re.error: + files = list(filter(lambda file: file["file"].name.find(filter_for) != -1, files)) + + # Ordering logic + ord_arg = order_by.split("_") + ord = ord_arg[0] + rev = True if ord_arg[1] == "asc" else False + match ord: + case 'name': + files.sort(key=lambda x: x['file'].name.casefold(), reverse = rev) + folders.sort(key=lambda x: x['file'].name.casefold(), reverse = rev) + case 'modified': + files.sort(key=lambda x: x['filest'].st_mtime, reverse = not rev) + folders.sort(key=lambda x: x['filest'].st_mtime, reverse = not rev) + case 'created': + files.sort(key=lambda x: x['filest'].st_ctime, reverse = not rev) + folders.sort(key=lambda x: 
x['filest'].st_ctime, reverse = not rev) + case 'size': + files.sort(key=lambda x: x['filest'].st_size, reverse = not rev) + # Folders has no file size, order by name instead + folders.sort(key=lambda x: x['file'].name.casefold()) + case _: + files.sort(key=lambda x: x['file'].name.casefold(), reverse = rev) + folders.sort(key=lambda x: x['file'].name.casefold(), reverse = rev) + + #Constructing the final file list, folders first + all = [{ + "isdir": x['is_dir'], + "name": str(x['file'].name), + "realpath": str(x['file']), + "size": x['filest'].st_size, + "modified": x['filest'].st_mtime, + "created": x['filest'].st_ctime, + } for x in folders + files ] + + return { + "realpath": str(path), + "files": all[(page-1)*max:(page)*max], + "total": len(all), + } + + + # Based on https://stackoverflow.com/a/46422554/13174603 + def start_rdt_proxy(self, ip: str, port: int): + async def pipe(reader: StreamReader, writer: StreamWriter): + try: + while not reader.at_eof(): + writer.write(await reader.read(2048)) + finally: + writer.close() + async def handle_client(local_reader: StreamReader, local_writer: StreamWriter): + try: + remote_reader, remote_writer = await open_connection( + ip, port) + pipe1 = pipe(local_reader, remote_writer) + pipe2 = pipe(remote_reader, local_writer) + await gather(pipe1, pipe2) + finally: + local_writer.close() + + self.rdt_proxy_server = start_server(handle_client, "127.0.0.1", port) + self.rdt_proxy_task = self.context.loop.create_task(self.rdt_proxy_server) + + def stop_rdt_proxy(self): + if self.rdt_proxy_server != None: + self.rdt_proxy_server.close() + if self.rdt_proxy_task: + self.rdt_proxy_task.cancel() + + async def _enable_rdt(self): + # TODO un-hardcode port + try: + self.stop_rdt_proxy() + ip = self.context.settings.getSetting("developer.rdt.ip", None) + + if ip != None: + self.logger.info("Connecting to React DevTools at " + ip) + async with ClientSession() as web: + res = await web.request("GET", "http://" + ip + ":8097", ssl=helpers.get_ssl_context()) + script = """ + if (!window.deckyHasConnectedRDT) { + window.deckyHasConnectedRDT = true; + // This fixes the overlay when hovering over an element in RDT + Object.defineProperty(window, '__REACT_DEVTOOLS_TARGET_WINDOW__', { + enumerable: true, + configurable: true, + get: function() { + return (GamepadNavTree?.m_context?.m_controller || FocusNavController)?.m_ActiveContext?.ActiveWindow || window; + } + }); + """ + await res.text() + "\n}" + if res.status != 200: + self.logger.error("Failed to connect to React DevTools at " + ip) + return False + self.start_rdt_proxy(ip, 8097) + self.logger.info("Connected to React DevTools, loading script") + tab = await get_gamepadui_tab() + # RDT needs to load before React itself to work. 
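+ # Close any leftover Decky tabs, then reload the GamepadUI tab and evaluate the hook script during the reload (reload_and_evaluate) so it is in place before React starts.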
+ await close_old_tabs() + result = await tab.reload_and_evaluate(script) + self.logger.info(result) + + except Exception: + self.logger.error("Failed to connect to React DevTools") + self.logger.error(format_exc()) + + async def enable_rdt(self): + self.context.loop.create_task(self._enable_rdt()) + + async def disable_rdt(self): + self.logger.info("Disabling React DevTools") + tab = await get_gamepadui_tab() + self.rdt_script_id = None + await close_old_tabs() + await tab.evaluate_js("location.reload();", False, True, False) + self.logger.info("React DevTools disabled") + + async def get_user_info(self) -> Dict[str, str]: + return { + "username": get_username(), + "path": get_home_path() + } + + async def get_tab_id(self, name: str): + return (await get_tab(name)).id diff --git a/backend/decky_plugin.py b/backend/decky_plugin.py new file mode 100644 index 00000000..35b872a0 --- /dev/null +++ b/backend/decky_plugin.py @@ -0,0 +1,209 @@ +""" +This module exposes various constants and helpers useful for decky plugins. + +* Plugin's settings and configurations should be stored under `DECKY_PLUGIN_SETTINGS_DIR`. +* Plugin's runtime data should be stored under `DECKY_PLUGIN_RUNTIME_DIR`. +* Plugin's persistent log files should be stored under `DECKY_PLUGIN_LOG_DIR`. + +Avoid writing outside of `DECKY_HOME`, storing under the suggested paths is strongly recommended. + +Some basic migration helpers are available: `migrate_any`, `migrate_settings`, `migrate_runtime`, `migrate_logs`. + +A logging facility `logger` is available which writes to the recommended location. +""" + +__version__ = '0.1.0' + +import os +import subprocess +import logging +import time + +""" +Constants +""" + +HOME: str = os.getenv("HOME", default="") +""" +The home directory of the effective user running the process. +Environment variable: `HOME`. +If `root` was specified in the plugin's flags it will be `/root` otherwise the user whose home decky resides in. +e.g.: `/home/deck` +""" + +USER: str = os.getenv("USER", default="") +""" +The effective username running the process. +Environment variable: `USER`. +It would be `root` if `root` was specified in the plugin's flags otherwise the user whose home decky resides in. +e.g.: `deck` +""" + +DECKY_VERSION: str = os.getenv("DECKY_VERSION", default="") +""" +The version of the decky loader. +Environment variable: `DECKY_VERSION`. +e.g.: `v2.5.0-pre1` +""" + +DECKY_USER: str = os.getenv("DECKY_USER", default="") +""" +The user whose home decky resides in. +Environment variable: `DECKY_USER`. +e.g.: `deck` +""" + +DECKY_USER_HOME: str = os.getenv("DECKY_USER_HOME", default="") +""" +The home of the user where decky resides in. +Environment variable: `DECKY_USER_HOME`. +e.g.: `/home/deck` +""" + +DECKY_HOME: str = os.getenv("DECKY_HOME", default="") +""" +The root of the decky folder. +Environment variable: `DECKY_HOME`. +e.g.: `/home/deck/homebrew` +""" + +DECKY_PLUGIN_SETTINGS_DIR: str = os.getenv( + "DECKY_PLUGIN_SETTINGS_DIR", default="") +""" +The recommended path in which to store configuration files (created automatically). +Environment variable: `DECKY_PLUGIN_SETTINGS_DIR`. +e.g.: `/home/deck/homebrew/settings/decky-plugin-template` +""" + +DECKY_PLUGIN_RUNTIME_DIR: str = os.getenv( + "DECKY_PLUGIN_RUNTIME_DIR", default="") +""" +The recommended path in which to store runtime data (created automatically). +Environment variable: `DECKY_PLUGIN_RUNTIME_DIR`. 
+e.g.: `/home/deck/homebrew/data/decky-plugin-template` +""" + +DECKY_PLUGIN_LOG_DIR: str = os.getenv("DECKY_PLUGIN_LOG_DIR", default="") +""" +The recommended path in which to store persistent logs (created automatically). +Environment variable: `DECKY_PLUGIN_LOG_DIR`. +e.g.: `/home/deck/homebrew/logs/decky-plugin-template` +""" + +DECKY_PLUGIN_DIR: str = os.getenv("DECKY_PLUGIN_DIR", default="") +""" +The root of the plugin's directory. +Environment variable: `DECKY_PLUGIN_DIR`. +e.g.: `/home/deck/homebrew/plugins/decky-plugin-template` +""" + +DECKY_PLUGIN_NAME: str = os.getenv("DECKY_PLUGIN_NAME", default="") +""" +The name of the plugin as specified in the 'plugin.json'. +Environment variable: `DECKY_PLUGIN_NAME`. +e.g.: `Example Plugin` +""" + +DECKY_PLUGIN_VERSION: str = os.getenv("DECKY_PLUGIN_VERSION", default="") +""" +The version of the plugin as specified in the 'package.json'. +Environment variable: `DECKY_PLUGIN_VERSION`. +e.g.: `0.0.1` +""" + +DECKY_PLUGIN_AUTHOR: str = os.getenv("DECKY_PLUGIN_AUTHOR", default="") +""" +The author of the plugin as specified in the 'plugin.json'. +Environment variable: `DECKY_PLUGIN_AUTHOR`. +e.g.: `John Doe` +""" + +__cur_time = time.strftime("%Y-%m-%d %H.%M.%S") +DECKY_PLUGIN_LOG: str = os.path.join(DECKY_PLUGIN_LOG_DIR, f"{__cur_time}.log") +""" +The path to the plugin's main logfile. +Environment variable: `DECKY_PLUGIN_LOG`. +e.g.: `/home/deck/homebrew/logs/decky-plugin-template/plugin.log` +""" + +""" +Migration helpers +""" + + +def migrate_any(target_dir: str, *files_or_directories: str) -> dict[str, str]: + """ + Migrate files and directories to a new location and remove old locations. + Specified files will be migrated to `target_dir`. + Specified directories will have their contents recursively migrated to `target_dir`. + + Returns the mapping of old -> new location. + """ + file_map: dict[str, str] = {} + for f in files_or_directories: + if not os.path.exists(f): + file_map[f] = "" + continue + if os.path.isdir(f): + src_dir = f + src_file = "." + file_map[f] = target_dir + else: + src_dir = os.path.dirname(f) + src_file = os.path.basename(f) + file_map[f] = os.path.join(target_dir, src_file) + subprocess.run(["sh", "-c", "mkdir -p \"$3\"; tar -cf - -C \"$1\" \"$2\" | tar -xf - -C \"$3\" && rm -rf \"$4\"", + "_", src_dir, src_file, target_dir, f]) + return file_map + + +def migrate_settings(*files_or_directories: str) -> dict[str, str]: + """ + Migrate files and directories relating to plugin settings to the recommended location and remove old locations. + Specified files will be migrated to `DECKY_PLUGIN_SETTINGS_DIR`. + Specified directories will have their contents recursively migrated to `DECKY_PLUGIN_SETTINGS_DIR`. + + Returns the mapping of old -> new location. + """ + return migrate_any(DECKY_PLUGIN_SETTINGS_DIR, *files_or_directories) + + +def migrate_runtime(*files_or_directories: str) -> dict[str, str]: + """ + Migrate files and directories relating to plugin runtime data to the recommended location and remove old locations + Specified files will be migrated to `DECKY_PLUGIN_RUNTIME_DIR`. + Specified directories will have their contents recursively migrated to `DECKY_PLUGIN_RUNTIME_DIR`. + + Returns the mapping of old -> new location. + """ + return migrate_any(DECKY_PLUGIN_RUNTIME_DIR, *files_or_directories) + + +def migrate_logs(*files_or_directories: str) -> dict[str, str]: + """ + Migrate files and directories relating to plugin logs to the recommended location and remove old locations. 
+ Specified files will be migrated to `DECKY_PLUGIN_LOG_DIR`. + Specified directories will have their contents recursively migrated to `DECKY_PLUGIN_LOG_DIR`. + + Returns the mapping of old -> new location. + """ + return migrate_any(DECKY_PLUGIN_LOG_DIR, *files_or_directories) + + +""" +Logging +""" + +try: + for x in [entry.name for entry in sorted(os.scandir(DECKY_PLUGIN_LOG_DIR),key=lambda x: x.stat().st_mtime, reverse=True) if entry.name.endswith(".log")][4:]: + os.unlink(os.path.join(DECKY_PLUGIN_LOG_DIR, x)) +except Exception as e: + print(f"Failed to delete old logs: {str(e)}") + +logging.basicConfig(filename=DECKY_PLUGIN_LOG, + format='[%(asctime)s][%(levelname)s]: %(message)s', + force=True) +logger: logging.Logger = logging.getLogger() +"""The main plugin logger writing to `DECKY_PLUGIN_LOG`.""" + +logger.setLevel(logging.INFO) diff --git a/backend/decky_plugin.pyi b/backend/decky_plugin.pyi new file mode 100644 index 00000000..b311a55a --- /dev/null +++ b/backend/decky_plugin.pyi @@ -0,0 +1,173 @@ +""" +This module exposes various constants and helpers useful for decky plugins. + +* Plugin's settings and configurations should be stored under `DECKY_PLUGIN_SETTINGS_DIR`. +* Plugin's runtime data should be stored under `DECKY_PLUGIN_RUNTIME_DIR`. +* Plugin's persistent log files should be stored under `DECKY_PLUGIN_LOG_DIR`. + +Avoid writing outside of `DECKY_HOME`, storing under the suggested paths is strongly recommended. + +Some basic migration helpers are available: `migrate_any`, `migrate_settings`, `migrate_runtime`, `migrate_logs`. + +A logging facility `logger` is available which writes to the recommended location. +""" + +__version__ = '0.1.0' + +import logging + +""" +Constants +""" + +HOME: str +""" +The home directory of the effective user running the process. +Environment variable: `HOME`. +If `root` was specified in the plugin's flags it will be `/root` otherwise the user whose home decky resides in. +e.g.: `/home/deck` +""" + +USER: str +""" +The effective username running the process. +Environment variable: `USER`. +It would be `root` if `root` was specified in the plugin's flags otherwise the user whose home decky resides in. +e.g.: `deck` +""" + +DECKY_VERSION: str +""" +The version of the decky loader. +Environment variable: `DECKY_VERSION`. +e.g.: `v2.5.0-pre1` +""" + +DECKY_USER: str +""" +The user whose home decky resides in. +Environment variable: `DECKY_USER`. +e.g.: `deck` +""" + +DECKY_USER_HOME: str +""" +The home of the user where decky resides in. +Environment variable: `DECKY_USER_HOME`. +e.g.: `/home/deck` +""" + +DECKY_HOME: str +""" +The root of the decky folder. +Environment variable: `DECKY_HOME`. +e.g.: `/home/deck/homebrew` +""" + +DECKY_PLUGIN_SETTINGS_DIR: str +""" +The recommended path in which to store configuration files (created automatically). +Environment variable: `DECKY_PLUGIN_SETTINGS_DIR`. +e.g.: `/home/deck/homebrew/settings/decky-plugin-template` +""" + +DECKY_PLUGIN_RUNTIME_DIR: str +""" +The recommended path in which to store runtime data (created automatically). +Environment variable: `DECKY_PLUGIN_RUNTIME_DIR`. +e.g.: `/home/deck/homebrew/data/decky-plugin-template` +""" + +DECKY_PLUGIN_LOG_DIR: str +""" +The recommended path in which to store persistent logs (created automatically). +Environment variable: `DECKY_PLUGIN_LOG_DIR`. +e.g.: `/home/deck/homebrew/logs/decky-plugin-template` +""" + +DECKY_PLUGIN_DIR: str +""" +The root of the plugin's directory. +Environment variable: `DECKY_PLUGIN_DIR`. 
+e.g.: `/home/deck/homebrew/plugins/decky-plugin-template` +""" + +DECKY_PLUGIN_NAME: str +""" +The name of the plugin as specified in the 'plugin.json'. +Environment variable: `DECKY_PLUGIN_NAME`. +e.g.: `Example Plugin` +""" + +DECKY_PLUGIN_VERSION: str +""" +The version of the plugin as specified in the 'package.json'. +Environment variable: `DECKY_PLUGIN_VERSION`. +e.g.: `0.0.1` +""" + +DECKY_PLUGIN_AUTHOR: str +""" +The author of the plugin as specified in the 'plugin.json'. +Environment variable: `DECKY_PLUGIN_AUTHOR`. +e.g.: `John Doe` +""" + +DECKY_PLUGIN_LOG: str +""" +The path to the plugin's main logfile. +Environment variable: `DECKY_PLUGIN_LOG`. +e.g.: `/home/deck/homebrew/logs/decky-plugin-template/plugin.log` +""" + +""" +Migration helpers +""" + + +def migrate_any(target_dir: str, *files_or_directories: str) -> dict[str, str]: + """ + Migrate files and directories to a new location and remove old locations. + Specified files will be migrated to `target_dir`. + Specified directories will have their contents recursively migrated to `target_dir`. + + Returns the mapping of old -> new location. + """ + + +def migrate_settings(*files_or_directories: str) -> dict[str, str]: + """ + Migrate files and directories relating to plugin settings to the recommended location and remove old locations. + Specified files will be migrated to `DECKY_PLUGIN_SETTINGS_DIR`. + Specified directories will have their contents recursively migrated to `DECKY_PLUGIN_SETTINGS_DIR`. + + Returns the mapping of old -> new location. + """ + + +def migrate_runtime(*files_or_directories: str) -> dict[str, str]: + """ + Migrate files and directories relating to plugin runtime data to the recommended location and remove old locations + Specified files will be migrated to `DECKY_PLUGIN_RUNTIME_DIR`. + Specified directories will have their contents recursively migrated to `DECKY_PLUGIN_RUNTIME_DIR`. + + Returns the mapping of old -> new location. + """ + + +def migrate_logs(*files_or_directories: str) -> dict[str, str]: + """ + Migrate files and directories relating to plugin logs to the recommended location and remove old locations. + Specified files will be migrated to `DECKY_PLUGIN_LOG_DIR`. + Specified directories will have their contents recursively migrated to `DECKY_PLUGIN_LOG_DIR`. + + Returns the mapping of old -> new location. + """ + + +""" +Logging +""" + +logger: logging.Logger +"""The main plugin logger writing to `DECKY_PLUGIN_LOG`.""" diff --git a/backend/main.py b/backend/main.py index 46a0671a..4399fcca 100644 --- a/backend/main.py +++ b/backend/main.py @@ -1,4 +1,4 @@ -# This file is needed to make the relative imports in src/ work properly. +# This file is needed to make the relative imports in decky_loader/ work properly. if __name__ == "__main__": - from src.main import main + from decky_loader.main import main main() diff --git a/backend/poetry.lock b/backend/poetry.lock new file mode 100644 index 00000000..3f6d4622 --- /dev/null +++ b/backend/poetry.lock @@ -0,0 +1,802 @@ +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. 
+ +[[package]] +name = "aiohttp" +version = "3.8.6" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.6" +files = [ + {file = "aiohttp-3.8.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:41d55fc043954cddbbd82503d9cc3f4814a40bcef30b3569bc7b5e34130718c1"}, + {file = "aiohttp-3.8.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1d84166673694841d8953f0a8d0c90e1087739d24632fe86b1a08819168b4566"}, + {file = "aiohttp-3.8.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:253bf92b744b3170eb4c4ca2fa58f9c4b87aeb1df42f71d4e78815e6e8b73c9e"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3fd194939b1f764d6bb05490987bfe104287bbf51b8d862261ccf66f48fb4096"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c5f938d199a6fdbdc10bbb9447496561c3a9a565b43be564648d81e1102ac22"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2817b2f66ca82ee699acd90e05c95e79bbf1dc986abb62b61ec8aaf851e81c93"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0fa375b3d34e71ccccf172cab401cd94a72de7a8cc01847a7b3386204093bb47"}, + {file = "aiohttp-3.8.6-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9de50a199b7710fa2904be5a4a9b51af587ab24c8e540a7243ab737b45844543"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e1d8cb0b56b3587c5c01de3bf2f600f186da7e7b5f7353d1bf26a8ddca57f965"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8e31e9db1bee8b4f407b77fd2507337a0a80665ad7b6c749d08df595d88f1cf5"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:7bc88fc494b1f0311d67f29fee6fd636606f4697e8cc793a2d912ac5b19aa38d"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ec00c3305788e04bf6d29d42e504560e159ccaf0be30c09203b468a6c1ccd3b2"}, + {file = "aiohttp-3.8.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ad1407db8f2f49329729564f71685557157bfa42b48f4b93e53721a16eb813ed"}, + {file = "aiohttp-3.8.6-cp310-cp310-win32.whl", hash = "sha256:ccc360e87341ad47c777f5723f68adbb52b37ab450c8bc3ca9ca1f3e849e5fe2"}, + {file = "aiohttp-3.8.6-cp310-cp310-win_amd64.whl", hash = "sha256:93c15c8e48e5e7b89d5cb4613479d144fda8344e2d886cf694fd36db4cc86865"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e2f9cc8e5328f829f6e1fb74a0a3a939b14e67e80832975e01929e320386b34"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e6a00ffcc173e765e200ceefb06399ba09c06db97f401f920513a10c803604ca"}, + {file = "aiohttp-3.8.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:41bdc2ba359032e36c0e9de5a3bd00d6fb7ea558a6ce6b70acedf0da86458321"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14cd52ccf40006c7a6cd34a0f8663734e5363fd981807173faf3a017e202fec9"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2d5b785c792802e7b275c420d84f3397668e9d49ab1cb52bd916b3b3ffcf09ad"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1bed815f3dc3d915c5c1e556c397c8667826fbc1b935d95b0ad680787896a358"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:96603a562b546632441926cd1293cfcb5b69f0b4159e6077f7c7dbdfb686af4d"}, + {file = "aiohttp-3.8.6-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d76e8b13161a202d14c9584590c4df4d068c9567c99506497bdd67eaedf36403"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e3f1e3f1a1751bb62b4a1b7f4e435afcdade6c17a4fd9b9d43607cebd242924a"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:76b36b3124f0223903609944a3c8bf28a599b2cc0ce0be60b45211c8e9be97f8"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:a2ece4af1f3c967a4390c284797ab595a9f1bc1130ef8b01828915a05a6ae684"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:16d330b3b9db87c3883e565340d292638a878236418b23cc8b9b11a054aaa887"}, + {file = "aiohttp-3.8.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:42c89579f82e49db436b69c938ab3e1559e5a4409eb8639eb4143989bc390f2f"}, + {file = "aiohttp-3.8.6-cp311-cp311-win32.whl", hash = "sha256:efd2fcf7e7b9d7ab16e6b7d54205beded0a9c8566cb30f09c1abe42b4e22bdcb"}, + {file = "aiohttp-3.8.6-cp311-cp311-win_amd64.whl", hash = "sha256:3b2ab182fc28e7a81f6c70bfbd829045d9480063f5ab06f6e601a3eddbbd49a0"}, + {file = "aiohttp-3.8.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fdee8405931b0615220e5ddf8cd7edd8592c606a8e4ca2a00704883c396e4479"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d25036d161c4fe2225d1abff2bd52c34ed0b1099f02c208cd34d8c05729882f0"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5d791245a894be071d5ab04bbb4850534261a7d4fd363b094a7b9963e8cdbd31"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0cccd1de239afa866e4ce5c789b3032442f19c261c7d8a01183fd956b1935349"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f13f60d78224f0dace220d8ab4ef1dbc37115eeeab8c06804fec11bec2bbd07"}, + {file = "aiohttp-3.8.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a9b5a0606faca4f6cc0d338359d6fa137104c337f489cd135bb7fbdbccb1e39"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:13da35c9ceb847732bf5c6c5781dcf4780e14392e5d3b3c689f6d22f8e15ae31"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:4d4cbe4ffa9d05f46a28252efc5941e0462792930caa370a6efaf491f412bc66"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:229852e147f44da0241954fc6cb910ba074e597f06789c867cb7fb0621e0ba7a"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:713103a8bdde61d13490adf47171a1039fd880113981e55401a0f7b42c37d071"}, + {file = "aiohttp-3.8.6-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:45ad816b2c8e3b60b510f30dbd37fe74fd4a772248a52bb021f6fd65dff809b6"}, + {file = "aiohttp-3.8.6-cp36-cp36m-win32.whl", hash = "sha256:2b8d4e166e600dcfbff51919c7a3789ff6ca8b3ecce16e1d9c96d95dd569eb4c"}, + {file = "aiohttp-3.8.6-cp36-cp36m-win_amd64.whl", hash = "sha256:0912ed87fee967940aacc5306d3aa8ba3a459fcd12add0b407081fbefc931e53"}, + {file = "aiohttp-3.8.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e2a988a0c673c2e12084f5e6ba3392d76c75ddb8ebc6c7e9ead68248101cd446"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:ebf3fd9f141700b510d4b190094db0ce37ac6361a6806c153c161dc6c041ccda"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3161ce82ab85acd267c8f4b14aa226047a6bee1e4e6adb74b798bd42c6ae1f80"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d95fc1bf33a9a81469aa760617b5971331cdd74370d1214f0b3109272c0e1e3c"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c43ecfef7deaf0617cee936836518e7424ee12cb709883f2c9a1adda63cc460"}, + {file = "aiohttp-3.8.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca80e1b90a05a4f476547f904992ae81eda5c2c85c66ee4195bb8f9c5fb47f28"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:90c72ebb7cb3a08a7f40061079817133f502a160561d0675b0a6adf231382c92"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bb54c54510e47a8c7c8e63454a6acc817519337b2b78606c4e840871a3e15349"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:de6a1c9f6803b90e20869e6b99c2c18cef5cc691363954c93cb9adeb26d9f3ae"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:a3628b6c7b880b181a3ae0a0683698513874df63783fd89de99b7b7539e3e8a8"}, + {file = "aiohttp-3.8.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:fc37e9aef10a696a5a4474802930079ccfc14d9f9c10b4662169671ff034b7df"}, + {file = "aiohttp-3.8.6-cp37-cp37m-win32.whl", hash = "sha256:f8ef51e459eb2ad8e7a66c1d6440c808485840ad55ecc3cafefadea47d1b1ba2"}, + {file = "aiohttp-3.8.6-cp37-cp37m-win_amd64.whl", hash = "sha256:b2fe42e523be344124c6c8ef32a011444e869dc5f883c591ed87f84339de5976"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:9e2ee0ac5a1f5c7dd3197de309adfb99ac4617ff02b0603fd1e65b07dc772e4b"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:01770d8c04bd8db568abb636c1fdd4f7140b284b8b3e0b4584f070180c1e5c62"}, + {file = "aiohttp-3.8.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3c68330a59506254b556b99a91857428cab98b2f84061260a67865f7f52899f5"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:89341b2c19fb5eac30c341133ae2cc3544d40d9b1892749cdd25892bbc6ac951"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71783b0b6455ac8f34b5ec99d83e686892c50498d5d00b8e56d47f41b38fbe04"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f628dbf3c91e12f4d6c8b3f092069567d8eb17814aebba3d7d60c149391aee3a"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b04691bc6601ef47c88f0255043df6f570ada1a9ebef99c34bd0b72866c217ae"}, + {file = "aiohttp-3.8.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7ee912f7e78287516df155f69da575a0ba33b02dd7c1d6614dbc9463f43066e3"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9c19b26acdd08dd239e0d3669a3dddafd600902e37881f13fbd8a53943079dbc"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:99c5ac4ad492b4a19fc132306cd57075c28446ec2ed970973bbf036bcda1bcc6"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f0f03211fd14a6a0aed2997d4b1c013d49fb7b50eeb9ffdf5e51f23cfe2c77fa"}, + {file = 
"aiohttp-3.8.6-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:8d399dade330c53b4106160f75f55407e9ae7505263ea86f2ccca6bfcbdb4921"}, + {file = "aiohttp-3.8.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ec4fd86658c6a8964d75426517dc01cbf840bbf32d055ce64a9e63a40fd7b771"}, + {file = "aiohttp-3.8.6-cp38-cp38-win32.whl", hash = "sha256:33164093be11fcef3ce2571a0dccd9041c9a93fa3bde86569d7b03120d276c6f"}, + {file = "aiohttp-3.8.6-cp38-cp38-win_amd64.whl", hash = "sha256:bdf70bfe5a1414ba9afb9d49f0c912dc524cf60141102f3a11143ba3d291870f"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d52d5dc7c6682b720280f9d9db41d36ebe4791622c842e258c9206232251ab2b"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4ac39027011414dbd3d87f7edb31680e1f430834c8cef029f11c66dad0670aa5"}, + {file = "aiohttp-3.8.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3f5c7ce535a1d2429a634310e308fb7d718905487257060e5d4598e29dc17f0b"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b30e963f9e0d52c28f284d554a9469af073030030cef8693106d918b2ca92f54"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:918810ef188f84152af6b938254911055a72e0f935b5fbc4c1a4ed0b0584aed1"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:002f23e6ea8d3dd8d149e569fd580c999232b5fbc601c48d55398fbc2e582e8c"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fcf3eabd3fd1a5e6092d1242295fa37d0354b2eb2077e6eb670accad78e40e1"}, + {file = "aiohttp-3.8.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:255ba9d6d5ff1a382bb9a578cd563605aa69bec845680e21c44afc2670607a95"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d67f8baed00870aa390ea2590798766256f31dc5ed3ecc737debb6e97e2ede78"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:86f20cee0f0a317c76573b627b954c412ea766d6ada1a9fcf1b805763ae7feeb"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:39a312d0e991690ccc1a61f1e9e42daa519dcc34ad03eb6f826d94c1190190dd"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:e827d48cf802de06d9c935088c2924e3c7e7533377d66b6f31ed175c1620e05e"}, + {file = "aiohttp-3.8.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bd111d7fc5591ddf377a408ed9067045259ff2770f37e2d94e6478d0f3fc0c17"}, + {file = "aiohttp-3.8.6-cp39-cp39-win32.whl", hash = "sha256:caf486ac1e689dda3502567eb89ffe02876546599bbf915ec94b1fa424eeffd4"}, + {file = "aiohttp-3.8.6-cp39-cp39-win_amd64.whl", hash = "sha256:3f0e27e5b733803333bb2371249f41cf42bae8884863e8e8965ec69bebe53132"}, + {file = "aiohttp-3.8.6.tar.gz", hash = "sha256:b0cf2a4501bff9330a8a5248b4ce951851e415bdcce9dc158e76cfd55e15085c"}, +] + +[package.dependencies] +aiosignal = ">=1.1.2" +async-timeout = ">=4.0.0a3,<5.0" +attrs = ">=17.3.0" +charset-normalizer = ">=2.0,<4.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +yarl = ">=1.0,<2.0" + +[package.extras] +speedups = ["Brotli", "aiodns", "cchardet"] + +[[package]] +name = "aiohttp-cors" +version = "0.7.0" +description = "CORS support for aiohttp" +optional = false +python-versions = "*" +files = [ + {file = "aiohttp-cors-0.7.0.tar.gz", hash = "sha256:4d39c6d7100fd9764ed1caf8cebf0eb01bf5e3f24e2e073fda6234bc48b19f5d"}, + {file = "aiohttp_cors-0.7.0-py3-none-any.whl", hash = 
"sha256:0451ba59fdf6909d0e2cd21e4c0a43752bc0703d33fc78ae94d9d9321710193e"}, +] + +[package.dependencies] +aiohttp = ">=1.1" + +[[package]] +name = "aiohttp-jinja2" +version = "1.5.1" +description = "jinja2 template renderer for aiohttp.web (http server for asyncio)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiohttp-jinja2-1.5.1.tar.gz", hash = "sha256:8d149b2a57d91f794b33a394ea5bc66b567f38c74a5a6a9477afc2450f105c01"}, + {file = "aiohttp_jinja2-1.5.1-py3-none-any.whl", hash = "sha256:45cf00b80ab4dcc19515df13a929826eeb9698e76a3bcfd99112418751f5a061"}, +] + +[package.dependencies] +aiohttp = ">=3.6.3" +jinja2 = ">=3.0.0" + +[[package]] +name = "aiosignal" +version = "1.3.1" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false +python-versions = ">=3.7" +files = [ + {file = "aiosignal-1.3.1-py3-none-any.whl", hash = "sha256:f8376fb07dd1e86a584e4fcdec80b36b7f81aac666ebc724e2c090300dd83b17"}, + {file = "aiosignal-1.3.1.tar.gz", hash = "sha256:54cd96e15e1649b75d6c87526a6ff0b6c1b0dd3459f43d9ca11d48c339b68cfc"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "altgraph" +version = "0.17.4" +description = "Python graph (network) package" +optional = false +python-versions = "*" +files = [ + {file = "altgraph-0.17.4-py2.py3-none-any.whl", hash = "sha256:642743b4750de17e655e6711601b077bc6598dbfa3ba5fa2b2a35ce12b508dff"}, + {file = "altgraph-0.17.4.tar.gz", hash = "sha256:1b5afbb98f6c4dcadb2e2ae6ab9fa994bbb8c1d75f4fa96d340f9437ae454406"}, +] + +[[package]] +name = "async-timeout" +version = "4.0.3" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.7" +files = [ + {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, + {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, +] + +[[package]] +name = "attrs" +version = "23.1.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.7" +files = [ + {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, + {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, +] + +[package.extras] +cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]", "pre-commit"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] +tests = ["attrs[tests-no-zope]", "zope-interface"] +tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] + +[[package]] +name = "certifi" +version = "2023.7.22" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +files = [ + {file = "certifi-2023.7.22-py3-none-any.whl", hash = "sha256:92d6037539857d8206b8f6ae472e8b77db8058fec5937a1ef3f54304089edbb9"}, + {file = "certifi-2023.7.22.tar.gz", hash = "sha256:539cc1d13202e33ca466e88b2807e29f4c13049d6d87031a3c110744495cb082"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.3.2" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7.0" +files = [ + {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, + {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, + {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, + {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, + {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, + {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, + {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, + {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = 
"sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, + {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, + {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +] + +[[package]] +name = "frozenlist" +version = "1.4.0" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.8" +files = [ + {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:764226ceef3125e53ea2cb275000e309c0aa5464d43bd72abd661e27fffc26ab"}, + {file = "frozenlist-1.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6484756b12f40003c6128bfcc3fa9f0d49a687e171186c2d85ec82e3758c559"}, + {file = "frozenlist-1.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9ac08e601308e41eb533f232dbf6b7e4cea762f9f84f6357136eed926c15d12c"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d081f13b095d74b67d550de04df1c756831f3b83dc9881c38985834387487f1b"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:71932b597f9895f011f47f17d6428252fc728ba2ae6024e13c3398a087c2cdea"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:981b9ab5a0a3178ff413bca62526bb784249421c24ad7381e39d67981be2c326"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e41f3de4df3e80de75845d3e743b3f1c4c8613c3997a912dbf0229fc61a8b963"}, + {file = "frozenlist-1.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6918d49b1f90821e93069682c06ffde41829c346c66b721e65a5c62b4bab0300"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0e5c8764c7829343d919cc2dfc587a8db01c4f70a4ebbc49abde5d4b158b007b"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:8d0edd6b1c7fb94922bf569c9b092ee187a83f03fb1a63076e7774b60f9481a8"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:e29cda763f752553fa14c68fb2195150bfab22b352572cb36c43c47bedba70eb"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:0c7c1b47859ee2cac3846fde1c1dc0f15da6cec5a0e5c72d101e0f83dcb67ff9"}, + {file = "frozenlist-1.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:901289d524fdd571be1c7be054f48b1f88ce8dddcbdf1ec698b27d4b8b9e5d62"}, + {file = "frozenlist-1.4.0-cp310-cp310-win32.whl", hash = "sha256:1a0848b52815006ea6596c395f87449f693dc419061cc21e970f139d466dc0a0"}, + {file = "frozenlist-1.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:b206646d176a007466358aa21d85cd8600a415c67c9bd15403336c331a10d956"}, + {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = 
"sha256:de343e75f40e972bae1ef6090267f8260c1446a1695e77096db6cfa25e759a95"}, + {file = "frozenlist-1.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ad2a9eb6d9839ae241701d0918f54c51365a51407fd80f6b8289e2dfca977cc3"}, + {file = "frozenlist-1.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bd7bd3b3830247580de99c99ea2a01416dfc3c34471ca1298bccabf86d0ff4dc"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdf1847068c362f16b353163391210269e4f0569a3c166bc6a9f74ccbfc7e839"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:38461d02d66de17455072c9ba981d35f1d2a73024bee7790ac2f9e361ef1cd0c"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5a32087d720c608f42caed0ef36d2b3ea61a9d09ee59a5142d6070da9041b8f"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd65632acaf0d47608190a71bfe46b209719bf2beb59507db08ccdbe712f969b"}, + {file = "frozenlist-1.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:261b9f5d17cac914531331ff1b1d452125bf5daa05faf73b71d935485b0c510b"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b89ac9768b82205936771f8d2eb3ce88503b1556324c9f903e7156669f521472"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:008eb8b31b3ea6896da16c38c1b136cb9fec9e249e77f6211d479db79a4eaf01"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:e74b0506fa5aa5598ac6a975a12aa8928cbb58e1f5ac8360792ef15de1aa848f"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:490132667476f6781b4c9458298b0c1cddf237488abd228b0b3650e5ecba7467"}, + {file = "frozenlist-1.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:76d4711f6f6d08551a7e9ef28c722f4a50dd0fc204c56b4bcd95c6cc05ce6fbb"}, + {file = "frozenlist-1.4.0-cp311-cp311-win32.whl", hash = "sha256:a02eb8ab2b8f200179b5f62b59757685ae9987996ae549ccf30f983f40602431"}, + {file = "frozenlist-1.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:515e1abc578dd3b275d6a5114030b1330ba044ffba03f94091842852f806f1c1"}, + {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0ed05f5079c708fe74bf9027e95125334b6978bf07fd5ab923e9e55e5fbb9d3"}, + {file = "frozenlist-1.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ca265542ca427bf97aed183c1676e2a9c66942e822b14dc6e5f42e038f92a503"}, + {file = "frozenlist-1.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:491e014f5c43656da08958808588cc6c016847b4360e327a62cb308c791bd2d9"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:17ae5cd0f333f94f2e03aaf140bb762c64783935cc764ff9c82dff626089bebf"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1e78fb68cf9c1a6aa4a9a12e960a5c9dfbdb89b3695197aa7064705662515de2"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d5655a942f5f5d2c9ed93d72148226d75369b4f6952680211972a33e59b1dfdc"}, + {file = "frozenlist-1.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c11b0746f5d946fecf750428a95f3e9ebe792c1ee3b1e96eeba145dc631a9672"}, + {file = 
"frozenlist-1.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e66d2a64d44d50d2543405fb183a21f76b3b5fd16f130f5c99187c3fb4e64919"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:88f7bc0fcca81f985f78dd0fa68d2c75abf8272b1f5c323ea4a01a4d7a614efc"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:5833593c25ac59ede40ed4de6d67eb42928cca97f26feea219f21d0ed0959b79"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:fec520865f42e5c7f050c2a79038897b1c7d1595e907a9e08e3353293ffc948e"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:b826d97e4276750beca7c8f0f1a4938892697a6bcd8ec8217b3312dad6982781"}, + {file = "frozenlist-1.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ceb6ec0a10c65540421e20ebd29083c50e6d1143278746a4ef6bcf6153171eb8"}, + {file = "frozenlist-1.4.0-cp38-cp38-win32.whl", hash = "sha256:2b8bcf994563466db019fab287ff390fffbfdb4f905fc77bc1c1d604b1c689cc"}, + {file = "frozenlist-1.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:a6c8097e01886188e5be3e6b14e94ab365f384736aa1fca6a0b9e35bd4a30bc7"}, + {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6c38721585f285203e4b4132a352eb3daa19121a035f3182e08e437cface44bf"}, + {file = "frozenlist-1.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0c6da9aee33ff0b1a451e867da0c1f47408112b3391dd43133838339e410963"}, + {file = "frozenlist-1.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:93ea75c050c5bb3d98016b4ba2497851eadf0ac154d88a67d7a6816206f6fa7f"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f61e2dc5ad442c52b4887f1fdc112f97caeff4d9e6ebe78879364ac59f1663e1"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aa384489fefeb62321b238e64c07ef48398fe80f9e1e6afeff22e140e0850eef"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:10ff5faaa22786315ef57097a279b833ecab1a0bfb07d604c9cbb1c4cdc2ed87"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:007df07a6e3eb3e33e9a1fe6a9db7af152bbd8a185f9aaa6ece10a3529e3e1c6"}, + {file = "frozenlist-1.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4f399d28478d1f604c2ff9119907af9726aed73680e5ed1ca634d377abb087"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:c5374b80521d3d3f2ec5572e05adc94601985cc526fb276d0c8574a6d749f1b3"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ce31ae3e19f3c902de379cf1323d90c649425b86de7bbdf82871b8a2a0615f3d"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7211ef110a9194b6042449431e08c4d80c0481e5891e58d429df5899690511c2"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:556de4430ce324c836789fa4560ca62d1591d2538b8ceb0b4f68fb7b2384a27a"}, + {file = "frozenlist-1.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7645a8e814a3ee34a89c4a372011dcd817964ce8cb273c8ed6119d706e9613e3"}, + {file = "frozenlist-1.4.0-cp39-cp39-win32.whl", hash = "sha256:19488c57c12d4e8095a922f328df3f179c820c212940a498623ed39160bc3c2f"}, + {file = "frozenlist-1.4.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:6221d84d463fb110bdd7619b69cb43878a11d51cbb9394ae3105d082d5199167"}, + {file = "frozenlist-1.4.0.tar.gz", hash = "sha256:09163bdf0b2907454042edb19f887c6d33806adc71fbd54afc14908bfdc22251"}, +] + +[[package]] +name = "idna" +version = "3.4" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.5" +files = [ + {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, + {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, +] + +[[package]] +name = "jinja2" +version = "3.1.2" +description = "A very fast and expressive template engine." +optional = false +python-versions = ">=3.7" +files = [ + {file = "Jinja2-3.1.2-py3-none-any.whl", hash = "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61"}, + {file = "Jinja2-3.1.2.tar.gz", hash = "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852"}, +] + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +name = "macholib" +version = "1.16.3" +description = "Mach-O header analysis and editing" +optional = false +python-versions = "*" +files = [ + {file = "macholib-1.16.3-py2.py3-none-any.whl", hash = "sha256:0e315d7583d38b8c77e815b1ecbdbf504a8258d8b3e17b61165c6feb60d18f2c"}, + {file = "macholib-1.16.3.tar.gz", hash = "sha256:07ae9e15e8e4cd9a788013d81f5908b3609aa76f9b1421bae9c4d7606ec86a30"}, +] + +[package.dependencies] +altgraph = ">=0.17" + +[[package]] +name = "markupsafe" +version = "2.1.3" +description = "Safely add untrusted strings to HTML/XML markup." +optional = false +python-versions = ">=3.7" +files = [ + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cd0f502fe016460680cd20aaa5a76d241d6f35a1c3350c474bac1273803893fa"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e09031c87a1e51556fdcb46e5bd4f59dfb743061cf93c4d6831bf894f125eb57"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68e78619a61ecf91e76aa3e6e8e33fc4894a2bebe93410754bd28fce0a8a4f9f"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c1a9bcdadc6c28eecee2c119465aebff8f7a584dd719facdd9e825ec61ab52"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:525808b8019e36eb524b8c68acdd63a37e75714eac50e988180b169d64480a00"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:962f82a3086483f5e5f64dbad880d31038b698494799b097bc59c2edf392fce6"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:aa7bd130efab1c280bed0f45501b7c8795f9fdbeb02e965371bbef3523627779"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c9c804664ebe8f83a211cace637506669e7890fec1b4195b505c214e50dd4eb7"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win32.whl", hash = "sha256:10bbfe99883db80bdbaff2dcf681dfc6533a614f700da1287707e8a5d78a8431"}, + {file = "MarkupSafe-2.1.3-cp310-cp310-win_amd64.whl", hash = "sha256:1577735524cdad32f9f694208aa75e422adba74f1baee7551620e43a3141f559"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ad9e82fb8f09ade1c3e1b996a6337afac2b8b9e365f926f5a61aacc71adc5b3c"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:3c0fae6c3be832a0a0473ac912810b2877c8cb9d76ca48de1ed31e1c68386575"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b076b6226fb84157e3f7c971a47ff3a679d837cf338547532ab866c57930dbee"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfce63a9e7834b12b87c64d6b155fdd9b3b96191b6bd334bf37db7ff1fe457f2"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:338ae27d6b8745585f87218a3f23f1512dbf52c26c28e322dbe54bcede54ccb9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e4dd52d80b8c83fdce44e12478ad2e85c64ea965e75d66dbeafb0a3e77308fcc"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:df0be2b576a7abbf737b1575f048c23fb1d769f267ec4358296f31c2479db8f9"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:5bbe06f8eeafd38e5d0a4894ffec89378b6c6a625ff57e3028921f8ff59318ac"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win32.whl", hash = "sha256:dd15ff04ffd7e05ffcb7fe79f1b98041b8ea30ae9234aed2a9168b5797c3effb"}, + {file = "MarkupSafe-2.1.3-cp311-cp311-win_amd64.whl", hash = "sha256:134da1eca9ec0ae528110ccc9e48041e0828d79f24121a1a146161103c76e686"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8e254ae696c88d98da6555f5ace2279cf7cd5b3f52be2b5cf97feafe883b58d2"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb0932dc158471523c9637e807d9bfb93e06a95cbf010f1a38b98623b929ef2b"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9402b03f1a1b4dc4c19845e5c749e3ab82d5078d16a2a4c2cd2df62d57bb0707"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca379055a47383d02a5400cb0d110cef0a776fc644cda797db0c5696cfd7e18e"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:b7ff0f54cb4ff66dd38bebd335a38e2c22c41a8ee45aa608efc890ac3e3931bc"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c011a4149cfbcf9f03994ec2edffcb8b1dc2d2aede7ca243746df97a5d41ce48"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:56d9f2ecac662ca1611d183feb03a3fa4406469dafe241673d521dd5ae92a155"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win32.whl", hash = "sha256:8758846a7e80910096950b67071243da3e5a20ed2546e6392603c096778d48e0"}, + {file = "MarkupSafe-2.1.3-cp37-cp37m-win_amd64.whl", hash = "sha256:787003c0ddb00500e49a10f2844fac87aa6ce977b90b0feaaf9de23c22508b24"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:2ef12179d3a291be237280175b542c07a36e7f60718296278d8593d21ca937d4"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2c1b19b3aaacc6e57b7e25710ff571c24d6c3613a45e905b1fde04d691b98ee0"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8afafd99945ead6e075b973fefa56379c5b5c53fd8937dad92c662da5d8fd5ee"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8c41976a29d078bb235fea9b2ecd3da465df42a562910f9022f1a03107bd02be"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d080e0a5eb2529460b30190fcfcc4199bd7f827663f858a226a81bc27beaa97e"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:69c0f17e9f5a7afdf2cc9fb2d1ce6aabdb3bafb7f38017c0b77862bcec2bbad8"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:504b320cd4b7eff6f968eddf81127112db685e81f7e36e75f9f84f0df46041c3"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42de32b22b6b804f42c5d98be4f7e5e977ecdd9ee9b660fda1a3edf03b11792d"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win32.whl", hash = "sha256:ceb01949af7121f9fc39f7d27f91be8546f3fb112c608bc4029aef0bab86a2a5"}, + {file = "MarkupSafe-2.1.3-cp38-cp38-win_amd64.whl", hash = "sha256:1b40069d487e7edb2676d3fbdb2b0829ffa2cd63a2ec26c4938b2d34391b4ecc"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8023faf4e01efadfa183e863fefde0046de576c6f14659e8782065bcece22198"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b2b56950d93e41f33b4223ead100ea0fe11f8e6ee5f641eb753ce4b77a7042b"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9dcdfd0eaf283af041973bff14a2e143b8bd64e069f4c383416ecd79a81aab58"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:05fb21170423db021895e1ea1e1f3ab3adb85d1c2333cbc2310f2a26bc77272e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:282c2cb35b5b673bbcadb33a585408104df04f14b2d9b01d4c345a3b92861c2c"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab4a0df41e7c16a1392727727e7998a467472d0ad65f3ad5e6e765015df08636"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7ef3cb2ebbf91e330e3bb937efada0edd9003683db6b57bb108c4001f37a02ea"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:0a4e4a1aff6c7ac4cd55792abf96c915634c2b97e3cc1c7129578aa68ebd754e"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win32.whl", hash = "sha256:fec21693218efe39aa7f8599346e90c705afa52c5b31ae019b2e57e8f6542bb2"}, + {file = "MarkupSafe-2.1.3-cp39-cp39-win_amd64.whl", hash = "sha256:3fd4abcb888d15a94f32b75d8fd18ee162ca0c064f35b11134be77050296d6ba"}, + {file = "MarkupSafe-2.1.3.tar.gz", hash = "sha256:af598ed32d6ae86f1b747b82783958b1a4ab8f617b06fe68795c7f026abbdcad"}, +] + +[[package]] +name = "multidict" +version = "6.0.4" +description = "multidict implementation" +optional = false +python-versions = ">=3.7" +files = [ + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:0b1a97283e0c85772d613878028fec909f003993e1007eafa715b24b377cb9b8"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:eeb6dcc05e911516ae3d1f207d4b0520d07f54484c49dfc294d6e7d63b734171"}, + {file = "multidict-6.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d6d635d5209b82a3492508cf5b365f3446afb65ae7ebd755e70e18f287b0adf7"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c048099e4c9e9d615545e2001d3d8a4380bd403e1a0578734e0d31703d1b0c0b"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ea20853c6dbbb53ed34cb4d080382169b6f4554d394015f1bef35e881bf83547"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:16d232d4e5396c2efbbf4f6d4df89bfa905eb0d4dc5b3549d872ab898451f569"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:36c63aaa167f6c6b04ef2c85704e93af16c11d20de1d133e39de6a0e84582a93"}, + {file = "multidict-6.0.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:64bdf1086b6043bf519869678f5f2757f473dee970d7abf6da91ec00acb9cb98"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:43644e38f42e3af682690876cff722d301ac585c5b9e1eacc013b7a3f7b696a0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7582a1d1030e15422262de9f58711774e02fa80df0d1578995c76214f6954988"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:ddff9c4e225a63a5afab9dd15590432c22e8057e1a9a13d28ed128ecf047bbdc"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:ee2a1ece51b9b9e7752e742cfb661d2a29e7bcdba2d27e66e28a99f1890e4fa0"}, + {file = "multidict-6.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a2e4369eb3d47d2034032a26c7a80fcb21a2cb22e1173d761a162f11e562caa5"}, + {file = "multidict-6.0.4-cp310-cp310-win32.whl", hash = "sha256:574b7eae1ab267e5f8285f0fe881f17efe4b98c39a40858247720935b893bba8"}, + {file = "multidict-6.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:4dcbb0906e38440fa3e325df2359ac6cb043df8e58c965bb45f4e406ecb162cc"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0dfad7a5a1e39c53ed00d2dd0c2e36aed4650936dc18fd9a1826a5ae1cad6f03"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:64da238a09d6039e3bd39bb3aee9c21a5e34f28bfa5aa22518581f910ff94af3"}, + {file = "multidict-6.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ff959bee35038c4624250473988b24f846cbeb2c6639de3602c073f10410ceba"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01a3a55bd90018c9c080fbb0b9f4891db37d148a0a18722b42f94694f8b6d4c9"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c5cb09abb18c1ea940fb99360ea0396f34d46566f157122c92dfa069d3e0e982"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:666daae833559deb2d609afa4490b85830ab0dfca811a98b70a205621a6109fe"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11bdf3f5e1518b24530b8241529d2050014c884cf18b6fc69c0c2b30ca248710"}, + {file = "multidict-6.0.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d18748f2d30f94f498e852c67d61261c643b349b9d2a581131725595c45ec6c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:458f37be2d9e4c95e2d8866a851663cbc76e865b78395090786f6cd9b3bbf4f4"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:b1a2eeedcead3a41694130495593a559a668f382eee0727352b9a41e1c45759a"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:7d6ae9d593ef8641544d6263c7fa6408cc90370c8cb2bbb65f8d43e5b0351d9c"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:5979b5632c3e3534e42ca6ff856bb24b2e3071b37861c2c727ce220d80eee9ed"}, + {file = "multidict-6.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:dcfe792765fab89c365123c81046ad4103fcabbc4f56d1c1997e6715e8015461"}, + {file = 
"multidict-6.0.4-cp311-cp311-win32.whl", hash = "sha256:3601a3cece3819534b11d4efc1eb76047488fddd0c85a3948099d5da4d504636"}, + {file = "multidict-6.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:81a4f0b34bd92df3da93315c6a59034df95866014ac08535fc819f043bfd51f0"}, + {file = "multidict-6.0.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:67040058f37a2a51ed8ea8f6b0e6ee5bd78ca67f169ce6122f3e2ec80dfe9b78"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:853888594621e6604c978ce2a0444a1e6e70c8d253ab65ba11657659dcc9100f"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:39ff62e7d0f26c248b15e364517a72932a611a9b75f35b45be078d81bdb86603"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af048912e045a2dc732847d33821a9d84ba553f5c5f028adbd364dd4765092ac"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1e8b901e607795ec06c9e42530788c45ac21ef3aaa11dbd0c69de543bfb79a9"}, + {file = "multidict-6.0.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62501642008a8b9871ddfccbf83e4222cf8ac0d5aeedf73da36153ef2ec222d2"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:99b76c052e9f1bc0721f7541e5e8c05db3941eb9ebe7b8553c625ef88d6eefde"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:509eac6cf09c794aa27bcacfd4d62c885cce62bef7b2c3e8b2e49d365b5003fe"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:21a12c4eb6ddc9952c415f24eef97e3e55ba3af61f67c7bc388dcdec1404a067"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:5cad9430ab3e2e4fa4a2ef4450f548768400a2ac635841bc2a56a2052cdbeb87"}, + {file = "multidict-6.0.4-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ab55edc2e84460694295f401215f4a58597f8f7c9466faec545093045476327d"}, + {file = "multidict-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:5a4dcf02b908c3b8b17a45fb0f15b695bf117a67b76b7ad18b73cf8e92608775"}, + {file = "multidict-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:6ed5f161328b7df384d71b07317f4d8656434e34591f20552c7bcef27b0ab88e"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5fc1b16f586f049820c5c5b17bb4ee7583092fa0d1c4e28b5239181ff9532e0c"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1502e24330eb681bdaa3eb70d6358e818e8e8f908a22a1851dfd4e15bc2f8161"}, + {file = "multidict-6.0.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b692f419760c0e65d060959df05f2a531945af31fda0c8a3b3195d4efd06de11"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45e1ecb0379bfaab5eef059f50115b54571acfbe422a14f668fc8c27ba410e7e"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ddd3915998d93fbcd2566ddf9cf62cdb35c9e093075f862935573d265cf8f65d"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:59d43b61c59d82f2effb39a93c48b845efe23a3852d201ed2d24ba830d0b4cf2"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cc8e1d0c705233c5dd0c5e6460fbad7827d5d36f310a0fadfd45cc3029762258"}, + {file = "multidict-6.0.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:d6aa0418fcc838522256761b3415822626f866758ee0bc6632c9486b179d0b52"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6748717bb10339c4760c1e63da040f5f29f5ed6e59d76daee30305894069a660"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:4d1a3d7ef5e96b1c9e92f973e43aa5e5b96c659c9bc3124acbbd81b0b9c8a951"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4372381634485bec7e46718edc71528024fcdc6f835baefe517b34a33c731d60"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:fc35cb4676846ef752816d5be2193a1e8367b4c1397b74a565a9d0389c433a1d"}, + {file = "multidict-6.0.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:4b9d9e4e2b37daddb5c23ea33a3417901fa7c7b3dee2d855f63ee67a0b21e5b1"}, + {file = "multidict-6.0.4-cp38-cp38-win32.whl", hash = "sha256:e41b7e2b59679edfa309e8db64fdf22399eec4b0b24694e1b2104fb789207779"}, + {file = "multidict-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:d6c254ba6e45d8e72739281ebc46ea5eb5f101234f3ce171f0e9f5cc86991480"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:16ab77bbeb596e14212e7bab8429f24c1579234a3a462105cda4a66904998664"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc779e9e6f7fda81b3f9aa58e3a6091d49ad528b11ed19f6621408806204ad35"}, + {file = "multidict-6.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ceef517eca3e03c1cceb22030a3e39cb399ac86bff4e426d4fc6ae49052cc60"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:281af09f488903fde97923c7744bb001a9b23b039a909460d0f14edc7bf59706"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:52f2dffc8acaba9a2f27174c41c9e57f60b907bb9f096b36b1a1f3be71c6284d"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b41156839806aecb3641f3208c0dafd3ac7775b9c4c422d82ee2a45c34ba81ca"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3fc56f88cc98ef8139255cf8cd63eb2c586531e43310ff859d6bb3a6b51f1"}, + {file = "multidict-6.0.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8316a77808c501004802f9beebde51c9f857054a0c871bd6da8280e718444449"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:f70b98cd94886b49d91170ef23ec5c0e8ebb6f242d734ed7ed677b24d50c82cf"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bf6774e60d67a9efe02b3616fee22441d86fab4c6d335f9d2051d19d90a40063"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:e69924bfcdda39b722ef4d9aa762b2dd38e4632b3641b1d9a57ca9cd18f2f83a"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:6b181d8c23da913d4ff585afd1155a0e1194c0b50c54fcfe286f70cdaf2b7176"}, + {file = "multidict-6.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:52509b5be062d9eafc8170e53026fbc54cf3b32759a23d07fd935fb04fc22d95"}, + {file = "multidict-6.0.4-cp39-cp39-win32.whl", hash = "sha256:27c523fbfbdfd19c6867af7346332b62b586eed663887392cff78d614f9ec313"}, + {file = "multidict-6.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:33029f5734336aa0d4c0384525da0387ef89148dc7191aae00ca5fb23d7aafc2"}, + {file = "multidict-6.0.4.tar.gz", hash = "sha256:3666906492efb76453c0e7b97f2cf459b0682e7402c0489a95484965dbc1da49"}, +] + +[[package]] +name = "nodeenv" 
+version = "1.8.0" +description = "Node.js virtual environment builder" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +files = [ + {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, + {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, +] + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "pefile" +version = "2023.2.7" +description = "Python PE parsing module" +optional = false +python-versions = ">=3.6.0" +files = [ + {file = "pefile-2023.2.7-py3-none-any.whl", hash = "sha256:da185cd2af68c08a6cd4481f7325ed600a88f6a813bad9dea07ab3ef73d8d8d6"}, + {file = "pefile-2023.2.7.tar.gz", hash = "sha256:82e6114004b3d6911c77c3953e3838654b04511b8b66e8583db70c65998017dc"}, +] + +[[package]] +name = "pyinstaller" +version = "5.13.2" +description = "PyInstaller bundles a Python application and all its dependencies into a single package." +optional = false +python-versions = "<3.13,>=3.7" +files = [ + {file = "pyinstaller-5.13.2-py3-none-macosx_10_13_universal2.whl", hash = "sha256:16cbd66b59a37f4ee59373a003608d15df180a0d9eb1a29ff3bfbfae64b23d0f"}, + {file = "pyinstaller-5.13.2-py3-none-manylinux2014_aarch64.whl", hash = "sha256:8f6dd0e797ae7efdd79226f78f35eb6a4981db16c13325e962a83395c0ec7420"}, + {file = "pyinstaller-5.13.2-py3-none-manylinux2014_i686.whl", hash = "sha256:65133ed89467edb2862036b35d7c5ebd381670412e1e4361215e289c786dd4e6"}, + {file = "pyinstaller-5.13.2-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:7d51734423685ab2a4324ab2981d9781b203dcae42839161a9ee98bfeaabdade"}, + {file = "pyinstaller-5.13.2-py3-none-manylinux2014_s390x.whl", hash = "sha256:2c2fe9c52cb4577a3ac39626b84cf16cf30c2792f785502661286184f162ae0d"}, + {file = "pyinstaller-5.13.2-py3-none-manylinux2014_x86_64.whl", hash = "sha256:c63ef6133eefe36c4b2f4daf4cfea3d6412ece2ca218f77aaf967e52a95ac9b8"}, + {file = "pyinstaller-5.13.2-py3-none-musllinux_1_1_aarch64.whl", hash = "sha256:aadafb6f213549a5906829bb252e586e2cf72a7fbdb5731810695e6516f0ab30"}, + {file = "pyinstaller-5.13.2-py3-none-musllinux_1_1_x86_64.whl", hash = "sha256:b2e1c7f5cceb5e9800927ddd51acf9cc78fbaa9e79e822c48b0ee52d9ce3c892"}, + {file = "pyinstaller-5.13.2-py3-none-win32.whl", hash = "sha256:421cd24f26144f19b66d3868b49ed673176765f92fa9f7914cd2158d25b6d17e"}, + {file = "pyinstaller-5.13.2-py3-none-win_amd64.whl", hash = "sha256:ddcc2b36052a70052479a9e5da1af067b4496f43686ca3cdda99f8367d0627e4"}, + {file = "pyinstaller-5.13.2-py3-none-win_arm64.whl", hash = "sha256:27cd64e7cc6b74c5b1066cbf47d75f940b71356166031deb9778a2579bb874c6"}, + {file = "pyinstaller-5.13.2.tar.gz", hash = "sha256:c8e5d3489c3a7cc5f8401c2d1f48a70e588f9967e391c3b06ddac1f685f8d5d2"}, +] + +[package.dependencies] +altgraph = "*" +macholib = {version = ">=1.8", markers = "sys_platform == \"darwin\""} +pefile = {version = ">=2022.5.30", markers = "sys_platform == \"win32\""} +pyinstaller-hooks-contrib = ">=2021.4" +pywin32-ctypes = {version = ">=0.2.1", markers = "sys_platform == \"win32\""} +setuptools = ">=42.0.0" + +[package.extras] +encryption = ["tinyaes (>=1.0.0)"] +hook-testing = ["execnet (>=1.5.0)", "psutil", "pytest (>=2.7.3)"] + +[[package]] +name = "pyinstaller-hooks-contrib" +version = "2023.10" +description = "Community maintained hooks for PyInstaller" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyinstaller-hooks-contrib-2023.10.tar.gz", hash = 
"sha256:4b4a998036abb713774cb26534ca06b7e6e09e4c628196017a10deb11a48747f"}, + {file = "pyinstaller_hooks_contrib-2023.10-py2.py3-none-any.whl", hash = "sha256:6dc1786a8f452941245d5bb85893e2a33632ebdcbc4c23eea41f2ee08281b0c0"}, +] + +[[package]] +name = "pyright" +version = "1.1.335" +description = "Command line wrapper for pyright" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pyright-1.1.335-py3-none-any.whl", hash = "sha256:1149d99d5cea3997010a5ac39611534e0426125d5090913ae5cb1e0e2c9fbca3"}, + {file = "pyright-1.1.335.tar.gz", hash = "sha256:12c09c1644b223515cc342f7d383e55eefeedd730d7875e39a2cf338c2d99be4"}, +] + +[package.dependencies] +nodeenv = ">=1.6.0" + +[package.extras] +all = ["twine (>=3.4.1)"] +dev = ["twine (>=3.4.1)"] + +[[package]] +name = "pywin32-ctypes" +version = "0.2.2" +description = "A (partial) reimplementation of pywin32 using ctypes/cffi" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pywin32-ctypes-0.2.2.tar.gz", hash = "sha256:3426e063bdd5fd4df74a14fa3cf80a0b42845a87e1d1e81f6549f9daec593a60"}, + {file = "pywin32_ctypes-0.2.2-py3-none-any.whl", hash = "sha256:bf490a1a709baf35d688fe0ecf980ed4de11d2b3e37b51e5442587a75d9957e7"}, +] + +[[package]] +name = "setuptools" +version = "68.2.2" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.8" +files = [ + {file = "setuptools-68.2.2-py3-none-any.whl", hash = "sha256:b454a35605876da60632df1a60f736524eb73cc47bbc9f3f1ef1b644de74fd2a"}, + {file = "setuptools-68.2.2.tar.gz", hash = "sha256:4ac1475276d2f1c48684874089fefcd83bd7162ddaafb81fac866ba0db282a87"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "watchdog" +version = "2.3.1" +description = "Filesystem events monitoring" +optional = false +python-versions = ">=3.6" +files = [ + {file = "watchdog-2.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d1f1200d4ec53b88bf04ab636f9133cb703eb19768a39351cee649de21a33697"}, + {file = "watchdog-2.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:564e7739abd4bd348aeafbf71cc006b6c0ccda3160c7053c4a53b67d14091d42"}, + {file = "watchdog-2.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:95ad708a9454050a46f741ba5e2f3468655ea22da1114e4c40b8cbdaca572565"}, + {file = "watchdog-2.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:a073c91a6ef0dda488087669586768195c3080c66866144880f03445ca23ef16"}, + {file = "watchdog-2.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:aa8b028750b43e80eea9946d01925168eeadb488dfdef1d82be4b1e28067f375"}, + {file = "watchdog-2.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:964fd236cd443933268ae49b59706569c8b741073dbfd7ca705492bae9d39aab"}, + {file = "watchdog-2.3.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:91fd146d723392b3e6eb1ac21f122fcce149a194a2ba0a82c5e4d0ee29cd954c"}, + {file = "watchdog-2.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:efe3252137392a471a2174d721e1037a0e6a5da7beb72a021e662b7000a9903f"}, + {file = "watchdog-2.3.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:85bf2263290591b7c5fa01140601b64c831be88084de41efbcba6ea289874f44"}, + {file = "watchdog-2.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8f2df370cd8e4e18499dd0bfdef476431bcc396108b97195d9448d90924e3131"}, + {file = "watchdog-2.3.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ea5d86d1bcf4a9d24610aa2f6f25492f441960cf04aed2bd9a97db439b643a7b"}, + {file = "watchdog-2.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:6f5d0f7eac86807275eba40b577c671b306f6f335ba63a5c5a348da151aba0fc"}, + {file = "watchdog-2.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b848c71ef2b15d0ef02f69da8cc120d335cec0ed82a3fa7779e27a5a8527225"}, + {file = "watchdog-2.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0d9878be36d2b9271e3abaa6f4f051b363ff54dbbe7e7df1af3c920e4311ee43"}, + {file = "watchdog-2.3.1-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4cd61f98cb37143206818cb1786d2438626aa78d682a8f2ecee239055a9771d5"}, + {file = "watchdog-2.3.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3d2dbcf1acd96e7a9c9aefed201c47c8e311075105d94ce5e899f118155709fd"}, + {file = "watchdog-2.3.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:03f342a9432fe08107defbe8e405a2cb922c5d00c4c6c168c68b633c64ce6190"}, + {file = "watchdog-2.3.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7a596f9415a378d0339681efc08d2249e48975daae391d58f2e22a3673b977cf"}, + {file = "watchdog-2.3.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:0e1dd6d449267cc7d6935d7fe27ee0426af6ee16578eed93bacb1be9ff824d2d"}, + {file = "watchdog-2.3.1-py3-none-manylinux2014_i686.whl", hash = "sha256:7a1876f660e32027a1a46f8a0fa5747ad4fcf86cb451860eae61a26e102c8c79"}, + {file = "watchdog-2.3.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:2caf77ae137935c1466f8cefd4a3aec7017b6969f425d086e6a528241cba7256"}, + {file = "watchdog-2.3.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:53f3e95081280898d9e4fc51c5c69017715929e4eea1ab45801d5e903dd518ad"}, + {file = "watchdog-2.3.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:9da7acb9af7e4a272089bd2af0171d23e0d6271385c51d4d9bde91fe918c53ed"}, + {file = "watchdog-2.3.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:8a4d484e846dcd75e96b96d80d80445302621be40e293bfdf34a631cab3b33dc"}, + {file = "watchdog-2.3.1-py3-none-win32.whl", hash = "sha256:a74155398434937ac2780fd257c045954de5b11b5c52fc844e2199ce3eecf4cf"}, + {file = "watchdog-2.3.1-py3-none-win_amd64.whl", hash = "sha256:5defe4f0918a2a1a4afbe4dbb967f743ac3a93d546ea4674567806375b024adb"}, + {file = "watchdog-2.3.1-py3-none-win_ia64.whl", hash = "sha256:4109cccf214b7e3462e8403ab1e5b17b302ecce6c103eb2fc3afa534a7f27b96"}, + {file = "watchdog-2.3.1.tar.gz", hash = "sha256:d9f9ed26ed22a9d331820a8432c3680707ea8b54121ddcc9dc7d9f2ceeb36906"}, +] + +[package.extras] +watchmedo = ["PyYAML (>=3.10)"] + +[[package]] +name = "yarl" +version = "1.9.2" +description = "Yet another URL library" +optional = false +python-versions = ">=3.7" 
+files = [ + {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:8c2ad583743d16ddbdf6bb14b5cd76bf43b0d0006e918809d5d4ddf7bde8dd82"}, + {file = "yarl-1.9.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:82aa6264b36c50acfb2424ad5ca537a2060ab6de158a5bd2a72a032cc75b9eb8"}, + {file = "yarl-1.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c0c77533b5ed4bcc38e943178ccae29b9bcf48ffd1063f5821192f23a1bd27b9"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee4afac41415d52d53a9833ebae7e32b344be72835bbb589018c9e938045a560"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9bf345c3a4f5ba7f766430f97f9cc1320786f19584acc7086491f45524a551ac"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2a96c19c52ff442a808c105901d0bdfd2e28575b3d5f82e2f5fd67e20dc5f4ea"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:891c0e3ec5ec881541f6c5113d8df0315ce5440e244a716b95f2525b7b9f3608"}, + {file = "yarl-1.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c3a53ba34a636a256d767c086ceb111358876e1fb6b50dfc4d3f4951d40133d5"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:566185e8ebc0898b11f8026447eacd02e46226716229cea8db37496c8cdd26e0"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:2b0738fb871812722a0ac2154be1f049c6223b9f6f22eec352996b69775b36d4"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:32f1d071b3f362c80f1a7d322bfd7b2d11e33d2adf395cc1dd4df36c9c243095"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:e9fdc7ac0d42bc3ea78818557fab03af6181e076a2944f43c38684b4b6bed8e3"}, + {file = "yarl-1.9.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:56ff08ab5df8429901ebdc5d15941b59f6253393cb5da07b4170beefcf1b2528"}, + {file = "yarl-1.9.2-cp310-cp310-win32.whl", hash = "sha256:8ea48e0a2f931064469bdabca50c2f578b565fc446f302a79ba6cc0ee7f384d3"}, + {file = "yarl-1.9.2-cp310-cp310-win_amd64.whl", hash = "sha256:50f33040f3836e912ed16d212f6cc1efb3231a8a60526a407aeb66c1c1956dde"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:646d663eb2232d7909e6601f1a9107e66f9791f290a1b3dc7057818fe44fc2b6"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aff634b15beff8902d1f918012fc2a42e0dbae6f469fce134c8a0dc51ca423bb"}, + {file = "yarl-1.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a83503934c6273806aed765035716216cc9ab4e0364f7f066227e1aaea90b8d0"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b25322201585c69abc7b0e89e72790469f7dad90d26754717f3310bfe30331c2"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:22a94666751778629f1ec4280b08eb11815783c63f52092a5953faf73be24191"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ec53a0ea2a80c5cd1ab397925f94bff59222aa3cf9c6da938ce05c9ec20428d"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:159d81f22d7a43e6eabc36d7194cb53f2f15f498dbbfa8edc8a3239350f59fe7"}, + {file = "yarl-1.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:832b7e711027c114d79dffb92576acd1bd2decc467dec60e1cac96912602d0e6"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:95d2ecefbcf4e744ea952d073c6922e72ee650ffc79028eb1e320e732898d7e8"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d4e2c6d555e77b37288eaf45b8f60f0737c9efa3452c6c44626a5455aeb250b9"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:783185c75c12a017cc345015ea359cc801c3b29a2966c2655cd12b233bf5a2be"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:b8cc1863402472f16c600e3e93d542b7e7542a540f95c30afd472e8e549fc3f7"}, + {file = "yarl-1.9.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:822b30a0f22e588b32d3120f6d41e4ed021806418b4c9f0bc3048b8c8cb3f92a"}, + {file = "yarl-1.9.2-cp311-cp311-win32.whl", hash = "sha256:a60347f234c2212a9f0361955007fcf4033a75bf600a33c88a0a8e91af77c0e8"}, + {file = "yarl-1.9.2-cp311-cp311-win_amd64.whl", hash = "sha256:be6b3fdec5c62f2a67cb3f8c6dbf56bbf3f61c0f046f84645cd1ca73532ea051"}, + {file = "yarl-1.9.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:38a3928ae37558bc1b559f67410df446d1fbfa87318b124bf5032c31e3447b74"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac9bb4c5ce3975aeac288cfcb5061ce60e0d14d92209e780c93954076c7c4367"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3da8a678ca8b96c8606bbb8bfacd99a12ad5dd288bc6f7979baddd62f71c63ef"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:13414591ff516e04fcdee8dc051c13fd3db13b673c7a4cb1350e6b2ad9639ad3"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf74d08542c3a9ea97bb8f343d4fcbd4d8f91bba5ec9d5d7f792dbe727f88938"}, + {file = "yarl-1.9.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7221580dc1db478464cfeef9b03b95c5852cc22894e418562997df0d074ccc"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:494053246b119b041960ddcd20fd76224149cfea8ed8777b687358727911dd33"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:52a25809fcbecfc63ac9ba0c0fb586f90837f5425edfd1ec9f3372b119585e45"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:e65610c5792870d45d7b68c677681376fcf9cc1c289f23e8e8b39c1485384185"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:1b1bba902cba32cdec51fca038fd53f8beee88b77efc373968d1ed021024cc04"}, + {file = "yarl-1.9.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:662e6016409828ee910f5d9602a2729a8a57d74b163c89a837de3fea050c7582"}, + {file = "yarl-1.9.2-cp37-cp37m-win32.whl", hash = "sha256:f364d3480bffd3aa566e886587eaca7c8c04d74f6e8933f3f2c996b7f09bee1b"}, + {file = "yarl-1.9.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6a5883464143ab3ae9ba68daae8e7c5c95b969462bbe42e2464d60e7e2698368"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5610f80cf43b6202e2c33ba3ec2ee0a2884f8f423c8f4f62906731d876ef4fac"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b9a4e67ad7b646cd6f0938c7ebfd60e481b7410f574c560e455e938d2da8e0f4"}, + {file = "yarl-1.9.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:83fcc480d7549ccebe9415d96d9263e2d4226798c37ebd18c930fce43dfb9574"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5fcd436ea16fee7d4207c045b1e340020e58a2597301cfbcfdbe5abd2356c2fb"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:84e0b1599334b1e1478db01b756e55937d4614f8654311eb26012091be109d59"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3458a24e4ea3fd8930e934c129b676c27452e4ebda80fbe47b56d8c6c7a63a9e"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:838162460b3a08987546e881a2bfa573960bb559dfa739e7800ceeec92e64417"}, + {file = "yarl-1.9.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4e2d08f07a3d7d3e12549052eb5ad3eab1c349c53ac51c209a0e5991bbada78"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:de119f56f3c5f0e2fb4dee508531a32b069a5f2c6e827b272d1e0ff5ac040333"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:149ddea5abf329752ea5051b61bd6c1d979e13fbf122d3a1f9f0c8be6cb6f63c"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:674ca19cbee4a82c9f54e0d1eee28116e63bc6fd1e96c43031d11cbab8b2afd5"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:9b3152f2f5677b997ae6c804b73da05a39daa6a9e85a512e0e6823d81cdad7cc"}, + {file = "yarl-1.9.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5415d5a4b080dc9612b1b63cba008db84e908b95848369aa1da3686ae27b6d2b"}, + {file = "yarl-1.9.2-cp38-cp38-win32.whl", hash = "sha256:f7a3d8146575e08c29ed1cd287068e6d02f1c7bdff8970db96683b9591b86ee7"}, + {file = "yarl-1.9.2-cp38-cp38-win_amd64.whl", hash = "sha256:63c48f6cef34e6319a74c727376e95626f84ea091f92c0250a98e53e62c77c72"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:75df5ef94c3fdc393c6b19d80e6ef1ecc9ae2f4263c09cacb178d871c02a5ba9"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c027a6e96ef77d401d8d5a5c8d6bc478e8042f1e448272e8d9752cb0aff8b5c8"}, + {file = "yarl-1.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3b078dbe227f79be488ffcfc7a9edb3409d018e0952cf13f15fd6512847f3f7"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:59723a029760079b7d991a401386390c4be5bfec1e7dd83e25a6a0881859e716"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b03917871bf859a81ccb180c9a2e6c1e04d2f6a51d953e6a5cdd70c93d4e5a2a"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c1012fa63eb6c032f3ce5d2171c267992ae0c00b9e164efe4d73db818465fac3"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a74dcbfe780e62f4b5a062714576f16c2f3493a0394e555ab141bf0d746bb955"}, + {file = "yarl-1.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8c56986609b057b4839968ba901944af91b8e92f1725d1a2d77cbac6972b9ed1"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2c315df3293cd521033533d242d15eab26583360b58f7ee5d9565f15fee1bef4"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:b7232f8dfbd225d57340e441d8caf8652a6acd06b389ea2d3222b8bc89cbfca6"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:53338749febd28935d55b41bf0bcc79d634881195a39f6b2f767870b72514caf"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = 
"sha256:066c163aec9d3d073dc9ffe5dd3ad05069bcb03fcaab8d221290ba99f9f69ee3"}, + {file = "yarl-1.9.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8288d7cd28f8119b07dd49b7230d6b4562f9b61ee9a4ab02221060d21136be80"}, + {file = "yarl-1.9.2-cp39-cp39-win32.whl", hash = "sha256:b124e2a6d223b65ba8768d5706d103280914d61f5cae3afbc50fc3dfcc016623"}, + {file = "yarl-1.9.2-cp39-cp39-win_amd64.whl", hash = "sha256:61016e7d582bc46a5378ffdd02cd0314fb8ba52f40f9cf4d9a5e7dbef88dee18"}, + {file = "yarl-1.9.2.tar.gz", hash = "sha256:04ab9d4b9f587c06d801c2abfe9317b77cdf996c65a90d5e84ecc45010823571"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" + +[metadata] +lock-version = "2.0" +python-versions = ">=3.10,<3.13" +content-hash = "acc0fcc8adcba521e2bb9f6eba2d3b047b1fe045d0019c0766ed7dc3fc804253" diff --git a/backend/pyinstaller.spec b/backend/pyinstaller.spec new file mode 100644 index 00000000..67bdc26e --- /dev/null +++ b/backend/pyinstaller.spec @@ -0,0 +1,30 @@ +import os +from PyInstaller.building.build_main import Analysis +from PyInstaller.building.api import EXE, PYZ +from PyInstaller.utils.hooks import copy_metadata + +a = Analysis( + ['main.py'], + datas=[ + ('locales', 'locales'), + ('static', 'static'), + ] + copy_metadata('decky_loader'), + hiddenimports=['logging.handlers', 'sqlite3', 'decky_plugin'], +) +pyz = PYZ(a.pure, a.zipped_data) + +noconsole = bool(os.getenv('DECKY_NOCONSOLE')) +name = "PluginLoader" +if noconsole: + name += "_noconsole" + +exe = EXE( + pyz, + a.scripts, + a.binaries, + a.zipfiles, + a.datas, + name=name, + upx=True, + console=not noconsole, +) diff --git a/backend/pyproject.toml b/backend/pyproject.toml new file mode 100644 index 00000000..5ead2d15 --- /dev/null +++ b/backend/pyproject.toml @@ -0,0 +1,36 @@ +[tool.poetry] +name = "decky-loader" +version = "0.0.0" # the real version will be autogenerated +description = "A plugin loader for the Steam Deck" +license = "GPLv2" +readme = "../README.md" +authors = [] +packages = [ + {include = "decky_loader"}, + {include = "decky_plugin.py"}, + {include = "decky_plugin.pyi"}, +] +include = ["decky_loader/static/*"] + +[tool.poetry.dependencies] +python = ">=3.10,<3.13" + +aiohttp = "^3.8.5" +aiohttp-jinja2 = "^1.5.1" +aiohttp-cors = "^0.7.0" +watchdog = "^2.1.7" +certifi = "*" + +[tool.poetry.group.dev.dependencies] +pyinstaller = "^5.13.0" +pyright = "^1.1.335" + +[tool.poetry.scripts] +decky-loader = 'decky_loader.main:main' + +[tool.pyright] +strict = ["*"] + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/backend/pyrightconfig.json b/backend/pyrightconfig.json deleted file mode 100644 index 9937f227..00000000 --- a/backend/pyrightconfig.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "strict": ["*"] -} \ No newline at end of file diff --git a/backend/requirements.txt b/backend/requirements.txt deleted file mode 100644 index 326a924c..00000000 --- a/backend/requirements.txt +++ /dev/null @@ -1,5 +0,0 @@ -aiohttp==3.8.4 -aiohttp-jinja2==1.5.1 -aiohttp_cors==0.7.0 -watchdog==2.1.7 -certifi==2023.7.22 diff --git a/backend/src/browser.py b/backend/src/browser.py deleted file mode 100644 index 7260db8e..00000000 --- a/backend/src/browser.py +++ /dev/null @@ -1,275 +0,0 @@ -# Full imports -import json -# import pprint -# from pprint import pformat - -# Partial imports -from aiohttp import ClientSession -from asyncio import sleep -from hashlib import sha256 -from io import BytesIO -from logging import getLogger -from os import R_OK, W_OK, path, listdir, access, 
mkdir -from shutil import rmtree -from time import time -from zipfile import ZipFile -from enum import IntEnum -from typing import Dict, List, TypedDict - -# Local modules -from .localplatform.localplatform import chown, chmod -from .loader import Loader, Plugins -from .helpers import get_ssl_context, download_remote_binary_to_path -from .settings import SettingsManager -from .injector import get_gamepadui_tab - -logger = getLogger("Browser") - -class PluginInstallType(IntEnum): - INSTALL = 0 - REINSTALL = 1 - UPDATE = 2 - -class PluginInstallRequest(TypedDict): - name: str - artifact: str - version: str - hash: str - install_type: PluginInstallType - -class PluginInstallContext: - def __init__(self, artifact: str, name: str, version: str, hash: str) -> None: - self.artifact = artifact - self.name = name - self.version = version - self.hash = hash - -class PluginBrowser: - def __init__(self, plugin_path: str, plugins: Plugins, loader: Loader, settings: SettingsManager) -> None: - self.plugin_path = plugin_path - self.plugins = plugins - self.loader = loader - self.settings = settings - self.install_requests: Dict[str, PluginInstallContext | List[PluginInstallContext]] = {} - - def _unzip_to_plugin_dir(self, zip: BytesIO, name: str, hash: str): - zip_hash = sha256(zip.getbuffer()).hexdigest() - if hash and (zip_hash != hash): - return False - zip_file = ZipFile(zip) - zip_file.extractall(self.plugin_path) - plugin_folder = self.find_plugin_folder(name) - assert plugin_folder is not None - plugin_dir = path.join(self.plugin_path, plugin_folder) - - if not chown(plugin_dir) or not chmod(plugin_dir, 555): - logger.error(f"chown/chmod exited with a non-zero exit code") - return False - return True - - async def _download_remote_binaries_for_plugin_with_name(self, pluginBasePath: str): - rv = False - try: - packageJsonPath = path.join(pluginBasePath, 'package.json') - pluginBinPath = path.join(pluginBasePath, 'bin') - - if access(packageJsonPath, R_OK): - with open(packageJsonPath, "r", encoding="utf-8") as f: - packageJson = json.load(f) - if "remote_binary" in packageJson and len(packageJson["remote_binary"]) > 0: - # create bin directory if needed. - chmod(pluginBasePath, 777) - if access(pluginBasePath, W_OK): - if not path.exists(pluginBinPath): - mkdir(pluginBinPath) - if not access(pluginBinPath, W_OK): - chmod(pluginBinPath, 777) - - rv = True - for remoteBinary in packageJson["remote_binary"]: - # Required Fields. If any Remote Binary is missing these fail the install. 
- binName = remoteBinary["name"] - binURL = remoteBinary["url"] - binHash = remoteBinary["sha256hash"] - if not await download_remote_binary_to_path(binURL, binHash, path.join(pluginBinPath, binName)): - rv = False - raise Exception(f"Error Downloading Remote Binary {binName}@{binURL} with hash {binHash} to {path.join(pluginBinPath, binName)}") - - chown(self.plugin_path) - chmod(pluginBasePath, 555) - else: - rv = True - logger.debug(f"No Remote Binaries to Download") - - except Exception as e: - rv = False - logger.debug(str(e)) - - return rv - - """Return the filename (only) for the specified plugin""" - def find_plugin_folder(self, name: str) -> str | None: - for folder in listdir(self.plugin_path): - try: - with open(path.join(self.plugin_path, folder, 'plugin.json'), "r", encoding="utf-8") as f: - plugin = json.load(f) - - if plugin['name'] == name: - return folder - except: - logger.debug(f"skipping {folder}") - - async def uninstall_plugin(self, name: str): - if self.loader.watcher: - self.loader.watcher.disabled = True - tab = await get_gamepadui_tab() - plugin_folder = self.find_plugin_folder(name) - assert plugin_folder is not None - plugin_dir = path.join(self.plugin_path, plugin_folder) - try: - logger.info("uninstalling " + name) - logger.info(" at dir " + plugin_dir) - logger.debug("calling frontend unload for %s" % str(name)) - res = await tab.evaluate_js(f"DeckyPluginLoader.unloadPlugin('{name}')") - logger.debug("result of unload from UI: %s", res) - # plugins_snapshot = self.plugins.copy() - # snapshot_string = pformat(plugins_snapshot) - # logger.debug("current plugins: %s", snapshot_string) - if name in self.plugins: - logger.debug("Plugin %s was found", name) - self.plugins[name].stop() - logger.debug("Plugin %s was stopped", name) - del self.plugins[name] - logger.debug("Plugin %s was removed from the dictionary", name) - self.cleanup_plugin_settings(name) - logger.debug("removing files %s" % str(name)) - rmtree(plugin_dir) - except FileNotFoundError: - logger.warning(f"Plugin {name} not installed, skipping uninstallation") - except Exception as e: - logger.error(f"Plugin {name} in {plugin_dir} was not uninstalled") - logger.error(f"Error at {str(e)}", exc_info=e) - if self.loader.watcher: - self.loader.watcher.disabled = False - - async def _install(self, artifact: str, name: str, version: str, hash: str): - # Will be set later in code - res_zip = None - - # Check if plugin is installed - isInstalled = False - # Preserve plugin order before removing plugin (uninstall alters the order and removes the plugin from the list) - current_plugin_order = self.settings.getSetting("pluginOrder")[:] - if self.loader.watcher: - self.loader.watcher.disabled = True - try: - pluginFolderPath = self.find_plugin_folder(name) - if pluginFolderPath: - isInstalled = True - except: - logger.error(f"Failed to determine if {name} is already installed, continuing anyway.") - - # Check if the file is a local file or a URL - if artifact.startswith("file://"): - logger.info(f"Installing {name} from local ZIP file (Version: {version})") - res_zip = BytesIO(open(artifact[7:], "rb").read()) - else: - logger.info(f"Installing {name} from URL (Version: {version})") - async with ClientSession() as client: - logger.debug(f"Fetching {artifact}") - res = await client.get(artifact, ssl=get_ssl_context()) - if res.status == 200: - logger.debug("Got 200. 
Reading...") - data = await res.read() - logger.debug(f"Read {len(data)} bytes") - res_zip = BytesIO(data) - else: - logger.fatal(f"Could not fetch from URL. {await res.text()}") - - # Check to make sure we got the file - if res_zip is None: - logger.fatal(f"Could not fetch {artifact}") - return - - # If plugin is installed, uninstall it - if isInstalled: - try: - logger.debug("Uninstalling existing plugin...") - await self.uninstall_plugin(name) - except: - logger.error(f"Plugin {name} could not be uninstalled.") - - # Install the plugin - logger.debug("Unzipping...") - ret = self._unzip_to_plugin_dir(res_zip, name, hash) - if ret: - plugin_folder = self.find_plugin_folder(name) - assert plugin_folder is not None - plugin_dir = path.join(self.plugin_path, plugin_folder) - ret = await self._download_remote_binaries_for_plugin_with_name(plugin_dir) - if ret: - logger.info(f"Installed {name} (Version: {version})") - if name in self.loader.plugins: - self.loader.plugins[name].stop() - self.loader.plugins.pop(name, None) - await sleep(1) - if not isInstalled: - current_plugin_order = self.settings.getSetting("pluginOrder") - current_plugin_order.append(name) - self.settings.setSetting("pluginOrder", current_plugin_order) - logger.debug("Plugin %s was added to the pluginOrder setting", name) - self.loader.import_plugin(path.join(plugin_dir, "main.py"), plugin_folder) - else: - logger.fatal(f"Failed Downloading Remote Binaries") - else: - logger.fatal(f"SHA-256 Mismatch!!!! {name} (Version: {version})") - if self.loader.watcher: - self.loader.watcher.disabled = False - - async def request_plugin_install(self, artifact: str, name: str, version: str, hash: str, install_type: PluginInstallType): - request_id = str(time()) - self.install_requests[request_id] = PluginInstallContext(artifact, name, version, hash) - tab = await get_gamepadui_tab() - await tab.open_websocket() - await tab.evaluate_js(f"DeckyPluginLoader.addPluginInstallPrompt('{name}', '{version}', '{request_id}', '{hash}', {install_type})") - - async def request_multiple_plugin_installs(self, requests: List[PluginInstallRequest]): - request_id = str(time()) - self.install_requests[request_id] = [PluginInstallContext(req['artifact'], req['name'], req['version'], req['hash']) for req in requests] - js_requests_parameter = ','.join([ - f"{{ name: '{req['name']}', version: '{req['version']}', hash: '{req['hash']}', install_type: {req['install_type']}}}" for req in requests - ]) - - tab = await get_gamepadui_tab() - await tab.open_websocket() - await tab.evaluate_js(f"DeckyPluginLoader.addMultiplePluginsInstallPrompt('{request_id}', [{js_requests_parameter}])") - - async def confirm_plugin_install(self, request_id: str): - requestOrRequests = self.install_requests.pop(request_id) - if isinstance(requestOrRequests, list): - [await self._install(req.artifact, req.name, req.version, req.hash) for req in requestOrRequests] - else: - await self._install(requestOrRequests.artifact, requestOrRequests.name, requestOrRequests.version, requestOrRequests.hash) - - def cancel_plugin_install(self, request_id: str): - self.install_requests.pop(request_id) - - def cleanup_plugin_settings(self, name: str): - """Removes any settings related to a plugin. Propably called when a plugin is uninstalled. 
- - Args: - name (string): The name of the plugin - """ - hidden_plugins = self.settings.getSetting("hiddenPlugins", []) - if name in hidden_plugins: - hidden_plugins.remove(name) - self.settings.setSetting("hiddenPlugins", hidden_plugins) - - - plugin_order = self.settings.getSetting("pluginOrder", []) - - if name in plugin_order: - plugin_order.remove(name) - self.settings.setSetting("pluginOrder", plugin_order) - - logger.debug("Removed any settings for plugin %s", name) diff --git a/backend/src/customtypes.py b/backend/src/customtypes.py deleted file mode 100644 index 84ebc235..00000000 --- a/backend/src/customtypes.py +++ /dev/null @@ -1,6 +0,0 @@ -from enum import Enum - -class UserType(Enum): - HOST_USER = 1 - EFFECTIVE_USER = 2 - ROOT = 3 \ No newline at end of file diff --git a/backend/src/helpers.py b/backend/src/helpers.py deleted file mode 100644 index e3770c63..00000000 --- a/backend/src/helpers.py +++ /dev/null @@ -1,153 +0,0 @@ -import re -import ssl -import uuid -import os -import subprocess -from hashlib import sha256 -from io import BytesIO - -import certifi -from aiohttp.web import Request, Response, middleware -from aiohttp.typedefs import Handler -from aiohttp import ClientSession -from .localplatform import localplatform -from .customtypes import UserType -from logging import getLogger - -REMOTE_DEBUGGER_UNIT = "steam-web-debug-portforward.service" - -# global vars -csrf_token = str(uuid.uuid4()) -ssl_ctx = ssl.create_default_context(cafile=certifi.where()) - -assets_regex = re.compile("^/plugins/.*/assets/.*") -frontend_regex = re.compile("^/frontend/.*") -logger = getLogger("Main") - -def get_ssl_context(): - return ssl_ctx - -def get_csrf_token(): - return csrf_token - -@middleware -async def csrf_middleware(request: Request, handler: Handler): - if str(request.method) == "OPTIONS" or request.headers.get('Authentication') == csrf_token or str(request.rel_url) == "/auth/token" or str(request.rel_url).startswith("/plugins/load_main/") or str(request.rel_url).startswith("/static/") or str(request.rel_url).startswith("/steam_resource/") or str(request.rel_url).startswith("/frontend/") or assets_regex.match(str(request.rel_url)) or frontend_regex.match(str(request.rel_url)): - return await handler(request) - return Response(text='Forbidden', status=403) - -# Get the default homebrew path unless a home_path is specified. 
home_path argument is deprecated -def get_homebrew_path() -> str: - return localplatform.get_unprivileged_path() - -# Recursively create path and chown as user -def mkdir_as_user(path: str): - path = os.path.realpath(path) - os.makedirs(path, exist_ok=True) - localplatform.chown(path) - -# Fetches the version of loader -def get_loader_version() -> str: - try: - with open(os.path.join(os.getcwd(), ".loader.version"), "r", encoding="utf-8") as version_file: - return version_file.readline().strip() - except Exception as e: - logger.warn(f"Failed to execute get_loader_version(): {str(e)}") - return "unknown" - -# returns the appropriate system python paths -def get_system_pythonpaths() -> list[str]: - try: - # run as normal normal user if on linux to also include user python paths - proc = subprocess.run(["python3" if localplatform.ON_LINUX else "python", "-c", "import sys; print('\\n'.join(x for x in sys.path if x))"], - # TODO make this less insane - capture_output=True, user=localplatform.localplatform._get_user_id() if localplatform.ON_LINUX else None, env={} if localplatform.ON_LINUX else None) # type: ignore - return [x.strip() for x in proc.stdout.decode().strip().split("\n")] - except Exception as e: - logger.warn(f"Failed to execute get_system_pythonpaths(): {str(e)}") - return [] - -# Download Remote Binaries to local Plugin -async def download_remote_binary_to_path(url: str, binHash: str, path: str) -> bool: - rv = False - try: - if os.access(os.path.dirname(path), os.W_OK): - async with ClientSession() as client: - res = await client.get(url, ssl=get_ssl_context()) - if res.status == 200: - data = BytesIO(await res.read()) - remoteHash = sha256(data.getbuffer()).hexdigest() - if binHash == remoteHash: - data.seek(0) - with open(path, 'wb') as f: - f.write(data.getbuffer()) - rv = True - else: - raise Exception(f"Fatal Error: Hash Mismatch for remote binary {path}@{url}") - else: - rv = False - except: - rv = False - - return rv - -# Deprecated -def set_user(): - pass - -# Deprecated -def set_user_group() -> str: - return get_user_group() - -######### -# Below is legacy code, provided for backwards compatibility. This will break on windows -######### - -# Get the user id hosting the plugin loader -def get_user_id() -> int: - return localplatform.localplatform._get_user_id() # pyright: ignore [reportPrivateUsage] - -# Get the user hosting the plugin loader -def get_user() -> str: - return localplatform.localplatform._get_user() # pyright: ignore [reportPrivateUsage] - -# Get the effective user id of the running process -def get_effective_user_id() -> int: - return localplatform.localplatform._get_effective_user_id() # pyright: ignore [reportPrivateUsage] - -# Get the effective user of the running process -def get_effective_user() -> str: - return localplatform.localplatform._get_effective_user() # pyright: ignore [reportPrivateUsage] - -# Get the effective user group id of the running process -def get_effective_user_group_id() -> int: - return localplatform.localplatform._get_effective_user_group_id() # pyright: ignore [reportPrivateUsage] - -# Get the effective user group of the running process -def get_effective_user_group() -> str: - return localplatform.localplatform._get_effective_user_group() # pyright: ignore [reportPrivateUsage] - -# Get the user owner of the given file path. 
-def get_user_owner(file_path: str) -> str: - return localplatform.localplatform._get_user_owner(file_path) # pyright: ignore [reportPrivateUsage] - -# Get the user group of the given file path, or the user group hosting the plugin loader -def get_user_group(file_path: str | None = None) -> str: - return localplatform.localplatform._get_user_group(file_path) # pyright: ignore [reportPrivateUsage] - -# Get the group id of the user hosting the plugin loader -def get_user_group_id() -> int: - return localplatform.localplatform._get_user_group_id() # pyright: ignore [reportPrivateUsage] - -# Get the default home path unless a user is specified -def get_home_path(username: str | None = None) -> str: - return localplatform.get_home_path(UserType.ROOT if username == "root" else UserType.HOST_USER) - -async def is_systemd_unit_active(unit_name: str) -> bool: - return await localplatform.service_active(unit_name) - -async def stop_systemd_unit(unit_name: str) -> bool: - return await localplatform.service_stop(unit_name) - -async def start_systemd_unit(unit_name: str) -> bool: - return await localplatform.service_start(unit_name) diff --git a/backend/src/injector.py b/backend/src/injector.py deleted file mode 100644 index a217f689..00000000 --- a/backend/src/injector.py +++ /dev/null @@ -1,438 +0,0 @@ -# Injector code from https://github.com/SteamDeckHomebrew/steamdeck-ui-inject. More info on how it works there. - -from asyncio import sleep -from logging import getLogger -from typing import Any, Callable, List, TypedDict, Dict - -from aiohttp import ClientSession -from aiohttp.client_exceptions import ClientConnectorError, ClientOSError -from asyncio.exceptions import TimeoutError -import uuid - -BASE_ADDRESS = "http://localhost:8080" - -logger = getLogger("Injector") - -class _TabResponse(TypedDict): - title: str - id: str - url: str - webSocketDebuggerUrl: str - -class Tab: - cmd_id = 0 - - def __init__(self, res: _TabResponse) -> None: - self.title: str = res["title"] - self.id: str = res["id"] - self.url: str = res["url"] - self.ws_url: str = res["webSocketDebuggerUrl"] - - self.websocket = None - self.client = None - - async def open_websocket(self): - self.client = ClientSession() - self.websocket = await self.client.ws_connect(self.ws_url) # type: ignore - - async def close_websocket(self): - if self.websocket: - await self.websocket.close() - if self.client: - await self.client.close() - - async def listen_for_message(self): - if self.websocket: - async for message in self.websocket: - data = message.json() - yield data - logger.warn(f"The Tab {self.title} socket has been disconnected while listening for messages.") - await self.close_websocket() - - async def _send_devtools_cmd(self, dc: Dict[str, Any], receive: bool = True): - if self.websocket: - self.cmd_id += 1 - dc["id"] = self.cmd_id - await self.websocket.send_json(dc) - if receive: - async for msg in self.listen_for_message(): - if "id" in msg and msg["id"] == dc["id"]: - return msg - return None - raise RuntimeError("Websocket not opened") - - async def evaluate_js(self, js: str, run_async: bool | None = False, manage_socket: bool | None = True, get_result: bool = True): - try: - if manage_socket: - await self.open_websocket() - - res = await self._send_devtools_cmd({ - "method": "Runtime.evaluate", - "params": { - "expression": js, - "userGesture": True, - "awaitPromise": run_async - } - }, get_result) - - finally: - if manage_socket: - await self.close_websocket() - return res - - async def has_global_var(self, var_name: str, 
manage_socket: bool = True): - res = await self.evaluate_js(f"window['{var_name}'] !== null && window['{var_name}'] !== undefined", False, manage_socket) - assert res is not None - - if not "result" in res or not "result" in res["result"] or not "value" in res["result"]["result"]: - return False - - return res["result"]["result"]["value"] - - async def close(self, manage_socket: bool = True): - try: - if manage_socket: - await self.open_websocket() - - res = await self._send_devtools_cmd({ - "method": "Page.close", - }, False) - - finally: - if manage_socket: - await self.close_websocket() - return res - - async def enable(self): - """ - Enables page domain notifications. - """ - await self._send_devtools_cmd({ - "method": "Page.enable", - }, False) - - async def disable(self): - """ - Disables page domain notifications. - """ - await self._send_devtools_cmd({ - "method": "Page.disable", - }, False) - - async def refresh(self, manage_socket: bool = True): - try: - if manage_socket: - await self.open_websocket() - - await self._send_devtools_cmd({ - "method": "Page.reload", - }, False) - - finally: - if manage_socket: - await self.close_websocket() - - return - async def reload_and_evaluate(self, js: str, manage_socket: bool = True): - """ - Reloads the current tab, with JS to run on load via debugger - """ - try: - if manage_socket: - await self.open_websocket() - - await self._send_devtools_cmd({ - "method": "Debugger.enable" - }, True) - - await self._send_devtools_cmd({ - "method": "Runtime.evaluate", - "params": { - "expression": "location.reload();", - "userGesture": True, - "awaitPromise": False - } - }, False) - - breakpoint_res = await self._send_devtools_cmd({ - "method": "Debugger.setInstrumentationBreakpoint", - "params": { - "instrumentation": "beforeScriptExecution" - } - }, True) - - assert breakpoint_res is not None - - logger.info(breakpoint_res) - - # Page finishes loading when breakpoint hits - - for _ in range(20): - # this works around 1/5 of the time, so just send it 8 times. - # the js accounts for being injected multiple times allowing only one instance to run at a time anyway - await self._send_devtools_cmd({ - "method": "Runtime.evaluate", - "params": { - "expression": js, - "userGesture": True, - "awaitPromise": False - } - }, False) - - await self._send_devtools_cmd({ - "method": "Debugger.removeBreakpoint", - "params": { - "breakpointId": breakpoint_res["result"]["breakpointId"] - } - }, False) - - for _ in range(4): - await self._send_devtools_cmd({ - "method": "Debugger.resume" - }, False) - - await self._send_devtools_cmd({ - "method": "Debugger.disable" - }, True) - - finally: - if manage_socket: - await self.close_websocket() - return - - async def add_script_to_evaluate_on_new_document(self, js: str, add_dom_wrapper: bool = True, manage_socket: bool = True, get_result: bool = True): - """ - How the underlying call functions is not particularly clear from the devtools docs, so stealing puppeteer's description: - - Adds a function which would be invoked in one of the following scenarios: - * whenever the page is navigated - * whenever the child frame is attached or navigated. In this case, the - function is invoked in the context of the newly attached frame. - - The function is invoked after the document was created but before any of - its scripts were run. This is useful to amend the JavaScript environment, - e.g. to seed `Math.random`. 
- - Parameters - ---------- - js : str - The script to evaluate on new document - add_dom_wrapper : bool - True to wrap the script in a wait for the 'DOMContentLoaded' event. - DOM will usually not exist when this execution happens, - so it is necessary to delay til DOM is loaded if you are modifying it - manage_socket : bool - True to have this function handle opening/closing the websocket for this tab - get_result : bool - True to wait for the result of this call - - Returns - ------- - int or None - The identifier of the script added, used to remove it later. - (see remove_script_to_evaluate_on_new_document below) - None is returned if `get_result` is False - """ - try: - - wrappedjs = """ - function scriptFunc() { - {js} - } - if (document.readyState === 'loading') { - addEventListener('DOMContentLoaded', () => { - scriptFunc(); - }); - } else { - scriptFunc(); - } - """.format(js=js) if add_dom_wrapper else js - - if manage_socket: - await self.open_websocket() - - res = await self._send_devtools_cmd({ - "method": "Page.addScriptToEvaluateOnNewDocument", - "params": { - "source": wrappedjs - } - }, get_result) - - finally: - if manage_socket: - await self.close_websocket() - return res - - async def remove_script_to_evaluate_on_new_document(self, script_id: str, manage_socket: bool = True): - """ - Removes a script from a page that was added with `add_script_to_evaluate_on_new_document` - - Parameters - ---------- - script_id : int - The identifier of the script to remove (returned from `add_script_to_evaluate_on_new_document`) - """ - - try: - if manage_socket: - await self.open_websocket() - - await self._send_devtools_cmd({ - "method": "Page.removeScriptToEvaluateOnNewDocument", - "params": { - "identifier": script_id - } - }, False) - - finally: - if manage_socket: - await self.close_websocket() - - async def has_element(self, element_name: str, manage_socket: bool = True): - res = await self.evaluate_js(f"document.getElementById('{element_name}') != null", False, manage_socket) - assert res is not None - - if not "result" in res or not "result" in res["result"] or not "value" in res["result"]["result"]: - return False - - return res["result"]["result"]["value"] - - async def inject_css(self, style: str, manage_socket: bool = True): - try: - css_id = str(uuid.uuid4()) - - result = await self.evaluate_js( - f""" - (function() {{ - const style = document.createElement('style'); - style.id = "{css_id}"; - document.head.append(style); - style.textContent = `{style}`; - }})() - """, False, manage_socket) - - assert result is not None - - if "exceptionDetails" in result["result"]: - return { - "success": False, - "result": result["result"] - } - - return { - "success": True, - "result": css_id - } - except Exception as e: - return { - "success": False, - "result": e - } - - async def remove_css(self, css_id: str, manage_socket: bool = True): - try: - result = await self.evaluate_js( - f""" - (function() {{ - let style = document.getElementById("{css_id}"); - - if (style.nodeName.toLowerCase() == 'style') - style.parentNode.removeChild(style); - }})() - """, False, manage_socket) - - assert result is not None - - if "exceptionDetails" in result["result"]: - return { - "success": False, - "result": result - } - - return { - "success": True - } - except Exception as e: - return { - "success": False, - "result": e - } - - async def get_steam_resource(self, url: str): - res = await self.evaluate_js(f'(async function test() {{ return await (await fetch("{url}")).text() }})()', True) - assert 
res is not None - return res["result"]["result"]["value"] - - def __repr__(self): - return self.title - - -async def get_tabs() -> List[Tab]: - res = {} - - na = False - while True: - try: - async with ClientSession() as web: - res = await web.get(f"{BASE_ADDRESS}/json", timeout=3) - except ClientConnectorError: - if not na: - logger.debug("Steam isn't available yet. Wait for a moment...") - na = True - await sleep(5) - except ClientOSError: - logger.warn(f"The request to {BASE_ADDRESS}/json was reset") - await sleep(1) - except TimeoutError: - logger.warn(f"The request to {BASE_ADDRESS}/json timed out") - await sleep(1) - else: - break - - if res.status == 200: - r = await res.json() - return [Tab(i) for i in r] - else: - raise Exception(f"/json did not return 200. {await res.text()}") - - -async def get_tab(tab_name: str) -> Tab: - tabs = await get_tabs() - tab = next((i for i in tabs if i.title == tab_name), None) - if not tab: - raise ValueError(f"Tab {tab_name} not found") - return tab - -async def get_tab_lambda(test: Callable[[Tab], bool]) -> Tab: - tabs = await get_tabs() - tab = next((i for i in tabs if test(i)), None) - if not tab: - raise ValueError(f"Tab not found by lambda") - return tab - -SHARED_CTX_NAMES = ["SharedJSContext", "Steam Shared Context presented by Valveā„¢", "Steam", "SP"] -CLOSEABLE_URLS = ["about:blank", "data:text/html,%3Cbody%3E%3C%2Fbody%3E"] # Closing anything other than these *really* likes to crash Steam -DO_NOT_CLOSE_URL = "Valve Steam Gamepad/default" # Steam Big Picture Mode tab - -def tab_is_gamepadui(t: Tab) -> bool: - return "https://steamloopback.host/routes/" in t.url and t.title in SHARED_CTX_NAMES - -async def get_gamepadui_tab() -> Tab: - tabs = await get_tabs() - tab = next((i for i in tabs if tab_is_gamepadui(i)), None) - if not tab: - raise ValueError(f"GamepadUI Tab not found") - return tab - -async def inject_to_tab(tab_name: str, js: str, run_async: bool = False): - tab = await get_tab(tab_name) - - return await tab.evaluate_js(js, run_async) - -async def close_old_tabs(): - tabs = await get_tabs() - for t in tabs: - if not t.title or (t.title not in SHARED_CTX_NAMES and any(url in t.url for url in CLOSEABLE_URLS) and DO_NOT_CLOSE_URL not in t.url): - logger.debug("Closing tab: " + getattr(t, "title", "Untitled")) - await t.close() - await sleep(0.5) diff --git a/backend/src/loader.py b/backend/src/loader.py deleted file mode 100644 index 7567912c..00000000 --- a/backend/src/loader.py +++ /dev/null @@ -1,200 +0,0 @@ -from __future__ import annotations -from asyncio import AbstractEventLoop, Queue, sleep -from json.decoder import JSONDecodeError -from logging import getLogger -from os import listdir, path -from pathlib import Path -from traceback import print_exc -from typing import Any, Tuple - -from aiohttp import web -from os.path import exists -from watchdog.events import RegexMatchingEventHandler, DirCreatedEvent, DirModifiedEvent, FileCreatedEvent, FileModifiedEvent # type: ignore -from watchdog.observers import Observer # type: ignore - -from typing import TYPE_CHECKING -if TYPE_CHECKING: - from .main import PluginManager - -from .injector import get_gamepadui_tab -from .plugin.plugin import PluginWrapper - -Plugins = dict[str, PluginWrapper] -ReloadQueue = Queue[Tuple[str, str, bool | None] | Tuple[str, str]] - -#TODO: Remove placeholder method -async def log_plugin_emitted_message(message: Any): - getLogger().debug(f"EMITTED MESSAGE: " + str(message)) - -class FileChangeHandler(RegexMatchingEventHandler): - def __init__(self, 
queue: ReloadQueue, plugin_path: str) -> None: - super().__init__(regexes=[r'^.*?dist\/index\.js$', r'^.*?main\.py$']) # type: ignore - self.logger = getLogger("file-watcher") - self.plugin_path = plugin_path - self.queue = queue - self.disabled = True - - def maybe_reload(self, src_path: str): - if self.disabled: - return - plugin_dir = Path(path.relpath(src_path, self.plugin_path)).parts[0] - if exists(path.join(self.plugin_path, plugin_dir, "plugin.json")): - self.queue.put_nowait((path.join(self.plugin_path, plugin_dir, "main.py"), plugin_dir, True)) - - def on_created(self, event: DirCreatedEvent | FileCreatedEvent): - src_path = event.src_path - if "__pycache__" in src_path: - return - - # check to make sure this isn't a directory - if path.isdir(src_path): - return - - # get the directory name of the plugin so that we can find its "main.py" and reload it; the - # file that changed is not necessarily the one that needs to be reloaded - self.logger.debug(f"file created: {src_path}") - self.maybe_reload(src_path) - - def on_modified(self, event: DirModifiedEvent | FileModifiedEvent): - src_path = event.src_path - if "__pycache__" in src_path: - return - - # check to make sure this isn't a directory - if path.isdir(src_path): - return - - # get the directory name of the plugin so that we can find its "main.py" and reload it; the - # file that changed is not necessarily the one that needs to be reloaded - self.logger.debug(f"file modified: {src_path}") - self.maybe_reload(src_path) - -class Loader: - def __init__(self, server_instance: PluginManager, plugin_path: str, loop: AbstractEventLoop, live_reload: bool = False) -> None: - self.loop = loop - self.logger = getLogger("Loader") - self.plugin_path = plugin_path - self.logger.info(f"plugin_path: {self.plugin_path}") - self.plugins: Plugins = {} - self.watcher = None - self.live_reload = live_reload - self.reload_queue: ReloadQueue = Queue() - self.loop.create_task(self.handle_reloads()) - - if live_reload: - self.observer = Observer() - self.watcher = FileChangeHandler(self.reload_queue, plugin_path) - self.observer.schedule(self.watcher, self.plugin_path, recursive=True) # type: ignore - self.observer.start() - self.loop.create_task(self.enable_reload_wait()) - - server_instance.web_app.add_routes([ - web.get("/frontend/{path:.*}", self.handle_frontend_assets), - web.get("/locales/{path:.*}", self.handle_frontend_locales), - web.get("/plugins", self.get_plugins), - web.get("/plugins/{plugin_name}/frontend_bundle", self.handle_frontend_bundle), - web.post("/plugins/{plugin_name}/methods/{method_name}", self.handle_plugin_method_call), - web.get("/plugins/{plugin_name}/assets/{path:.*}", self.handle_plugin_frontend_assets), - web.post("/plugins/{plugin_name}/reload", self.handle_backend_reload_request) - ]) - - async def enable_reload_wait(self): - if self.live_reload: - await sleep(10) - if self.watcher: - self.logger.info("Hot reload enabled") - self.watcher.disabled = False - - async def handle_frontend_assets(self, request: web.Request): - file = Path(__file__).parents[1].joinpath("static").joinpath(request.match_info["path"]) - return web.FileResponse(file, headers={"Cache-Control": "no-cache"}) - - async def handle_frontend_locales(self, request: web.Request): - req_lang = request.match_info["path"] - file = Path(__file__).parents[1].joinpath("locales").joinpath(req_lang) - if exists(file): - return web.FileResponse(file, headers={"Cache-Control": "no-cache", "Content-Type": "application/json"}) - else: - 
self.logger.info(f"Language {req_lang} not available, returning an empty dictionary") - return web.json_response(data={}, headers={"Cache-Control": "no-cache"}) - - async def get_plugins(self, request: web.Request): - plugins = list(self.plugins.values()) - return web.json_response([{"name": str(i), "version": i.version} for i in plugins]) - - async def handle_plugin_frontend_assets(self, request: web.Request): - plugin = self.plugins[request.match_info["plugin_name"]] - file = path.join(self.plugin_path, plugin.plugin_directory, "dist/assets", request.match_info["path"]) - - return web.FileResponse(file, headers={"Cache-Control": "no-cache"}) - - async def handle_frontend_bundle(self, request: web.Request): - plugin = self.plugins[request.match_info["plugin_name"]] - - with open(path.join(self.plugin_path, plugin.plugin_directory, "dist/index.js"), "r", encoding="utf-8") as bundle: - return web.Response(text=bundle.read(), content_type="application/javascript") - - def import_plugin(self, file: str, plugin_directory: str, refresh: bool | None = False, batch: bool | None = False): - try: - plugin = PluginWrapper(file, plugin_directory, self.plugin_path) - if plugin.name in self.plugins: - if not "debug" in plugin.flags and refresh: - self.logger.info(f"Plugin {plugin.name} is already loaded and has requested to not be re-loaded") - return - else: - self.plugins[plugin.name].stop() - self.plugins.pop(plugin.name, None) - if plugin.passive: - self.logger.info(f"Plugin {plugin.name} is passive") - self.plugins[plugin.name] = plugin.start() - self.plugins[plugin.name].set_emitted_message_callback(log_plugin_emitted_message) - self.logger.info(f"Loaded {plugin.name}") - if not batch: - self.loop.create_task(self.dispatch_plugin(plugin.name, plugin.version)) - except Exception as e: - self.logger.error(f"Could not load {file}. 
{e}") - print_exc() - - async def dispatch_plugin(self, name: str, version: str | None): - gpui_tab = await get_gamepadui_tab() - await gpui_tab.evaluate_js(f"window.importDeckyPlugin('{name}', '{version}')") - - def import_plugins(self): - self.logger.info(f"import plugins from {self.plugin_path}") - - directories = [i for i in listdir(self.plugin_path) if path.isdir(path.join(self.plugin_path, i)) and path.isfile(path.join(self.plugin_path, i, "plugin.json"))] - for directory in directories: - self.logger.info(f"found plugin: {directory}") - self.import_plugin(path.join(self.plugin_path, directory, "main.py"), directory, False, True) - - async def handle_reloads(self): - while True: - args = await self.reload_queue.get() - self.import_plugin(*args) # type: ignore - - async def handle_plugin_method_call(self, request: web.Request): - res = {} - plugin = self.plugins[request.match_info["plugin_name"]] - method_name = request.match_info["method_name"] - try: - method_info = await request.json() - args: Any = method_info["args"] - except JSONDecodeError: - args = {} - try: - if method_name.startswith("_"): - raise RuntimeError("Tried to call private method") - res["result"] = await plugin.execute_method(method_name, args) - res["success"] = True - except Exception as e: - res["result"] = str(e) - res["success"] = False - return web.json_response(res) - - async def handle_backend_reload_request(self, request: web.Request): - plugin_name : str = request.match_info["plugin_name"] - plugin = self.plugins[plugin_name] - - await self.reload_queue.put((plugin.file, plugin.plugin_directory)) - - return web.Response(status=200) \ No newline at end of file diff --git a/backend/src/localplatform/localplatform.py b/backend/src/localplatform/localplatform.py deleted file mode 100644 index 028eff8f..00000000 --- a/backend/src/localplatform/localplatform.py +++ /dev/null @@ -1,52 +0,0 @@ -import platform, os - -ON_WINDOWS = platform.system() == "Windows" -ON_LINUX = not ON_WINDOWS - -if ON_WINDOWS: - from .localplatformwin import * - from . import localplatformwin as localplatform -else: - from .localplatformlinux import * - from . import localplatformlinux as localplatform - -def get_privileged_path() -> str: - '''Get path accessible by elevated user. Holds plugins, decky loader and decky loader configs''' - return localplatform.get_privileged_path() - -def get_unprivileged_path() -> str: - '''Get path accessible by non-elevated user. Holds plugin configuration, plugin data and plugin logs. 
Externally referred to as the 'Homebrew' directory''' - return localplatform.get_unprivileged_path() - -def get_unprivileged_user() -> str: - '''Get user that should own files made in unprivileged path''' - return localplatform.get_unprivileged_user() - -def get_chown_plugin_path() -> bool: - return os.getenv("CHOWN_PLUGIN_PATH", "1") == "1" - -def get_server_host() -> str: - return os.getenv("SERVER_HOST", "127.0.0.1") - -def get_server_port() -> int: - return int(os.getenv("SERVER_PORT", "1337")) - -def get_live_reload() -> bool: - return os.getenv("LIVE_RELOAD", "1") == "1" - -def get_keep_systemd_service() -> bool: - return os.getenv("KEEP_SYSTEMD_SERVICE", "0") == "1" - -def get_log_level() -> int: - return {"CRITICAL": 50, "ERROR": 40, "WARNING": 30, "INFO": 20, "DEBUG": 10}[ - os.getenv("LOG_LEVEL", "INFO") - ] - -def get_selinux() -> bool: - if ON_LINUX: - from subprocess import check_output - try: - if (check_output("getenforce").decode("ascii").strip("\n") == "Enforcing"): return True - except FileNotFoundError: - pass - return False diff --git a/backend/src/localplatform/localplatformlinux.py b/backend/src/localplatform/localplatformlinux.py deleted file mode 100644 index 1ec3fc1a..00000000 --- a/backend/src/localplatform/localplatformlinux.py +++ /dev/null @@ -1,192 +0,0 @@ -import os, pwd, grp, sys, logging -from subprocess import call, run, DEVNULL, PIPE, STDOUT -from ..customtypes import UserType - -logger = logging.getLogger("localplatform") - -# Get the user id hosting the plugin loader -def _get_user_id() -> int: - return pwd.getpwnam(_get_user()).pw_uid - -# Get the user hosting the plugin loader -def _get_user() -> str: - return get_unprivileged_user() - -# Get the effective user id of the running process -def _get_effective_user_id() -> int: - return os.geteuid() - -# Get the effective user of the running process -def _get_effective_user() -> str: - return pwd.getpwuid(_get_effective_user_id()).pw_name - -# Get the effective user group id of the running process -def _get_effective_user_group_id() -> int: - return os.getegid() - -# Get the effective user group of the running process -def _get_effective_user_group() -> str: - return grp.getgrgid(_get_effective_user_group_id()).gr_name - -# Get the user owner of the given file path. 
-def _get_user_owner(file_path: str) -> str: - return pwd.getpwuid(os.stat(file_path).st_uid).pw_name - -# Get the user group of the given file path, or the user group hosting the plugin loader -def _get_user_group(file_path: str | None = None) -> str: - return grp.getgrgid(os.stat(file_path).st_gid if file_path is not None else _get_user_group_id()).gr_name - -# Get the group id of the user hosting the plugin loader -def _get_user_group_id() -> int: - return pwd.getpwuid(_get_user_id()).pw_gid - -def chown(path : str, user : UserType = UserType.HOST_USER, recursive : bool = True) -> bool: - user_str = "" - - if user == UserType.HOST_USER: - user_str = _get_user()+":"+_get_user_group() - elif user == UserType.EFFECTIVE_USER: - user_str = _get_effective_user()+":"+_get_effective_user_group() - elif user == UserType.ROOT: - user_str = "root:root" - else: - raise Exception("Unknown User Type") - - result = call(["chown", "-R", user_str, path] if recursive else ["chown", user_str, path]) - return result == 0 - -def chmod(path : str, permissions : int, recursive : bool = True) -> bool: - if _get_effective_user_id() != 0: - return True - result = call(["chmod", "-R", str(permissions), path] if recursive else ["chmod", str(permissions), path]) - return result == 0 - -def folder_owner(path : str) -> UserType|None: - user_owner = _get_user_owner(path) - - if (user_owner == _get_user()): - return UserType.HOST_USER - - elif (user_owner == _get_effective_user()): - return UserType.EFFECTIVE_USER - - else: - return None - -def get_home_path(user : UserType = UserType.HOST_USER) -> str: - user_name = "root" - - if user == UserType.HOST_USER: - user_name = _get_user() - elif user == UserType.EFFECTIVE_USER: - user_name = _get_effective_user() - elif user == UserType.ROOT: - pass - else: - raise Exception("Unknown User Type") - - return pwd.getpwnam(user_name).pw_dir - -def get_username() -> str: - return _get_user() - -def setgid(user : UserType = UserType.HOST_USER): - user_id = 0 - - if user == UserType.HOST_USER: - user_id = _get_user_group_id() - elif user == UserType.ROOT: - pass - else: - raise Exception("Unknown user type") - - os.setgid(user_id) - -def setuid(user : UserType = UserType.HOST_USER): - user_id = 0 - - if user == UserType.HOST_USER: - user_id = _get_user_id() - elif user == UserType.ROOT: - pass - else: - raise Exception("Unknown user type") - - os.setuid(user_id) - -async def service_active(service_name : str) -> bool: - res = run(["systemctl", "is-active", service_name], stdout=DEVNULL, stderr=DEVNULL) - return res.returncode == 0 - -async def service_restart(service_name : str) -> bool: - call(["systemctl", "daemon-reload"]) - cmd = ["systemctl", "restart", service_name] - res = run(cmd, stdout=PIPE, stderr=STDOUT) - return res.returncode == 0 - -async def service_stop(service_name : str) -> bool: - cmd = ["systemctl", "stop", service_name] - res = run(cmd, stdout=PIPE, stderr=STDOUT) - return res.returncode == 0 - -async def service_start(service_name : str) -> bool: - cmd = ["systemctl", "start", service_name] - res = run(cmd, stdout=PIPE, stderr=STDOUT) - return res.returncode == 0 - -def get_privileged_path() -> str: - path = os.getenv("PRIVILEGED_PATH") - - if path == None: - path = get_unprivileged_path() - - return path - -def _parent_dir(path : str | None) -> str | None: - if path == None: - return None - - if path.endswith('/'): - path = path[:-1] - - return os.path.dirname(path) - -def get_unprivileged_path() -> str: - path = os.getenv("UNPRIVILEGED_PATH") - - if path 
== None: - path = _parent_dir(os.getenv("PLUGIN_PATH")) - - if path == None: - logger.debug("Unprivileged path is not properly configured. Making something up!") - # Expected path of loader binary is /home/deck/homebrew/service/PluginLoader - path = _parent_dir(_parent_dir(os.path.realpath(sys.argv[0]))) - - if path != None and not os.path.exists(path): - path = None - - if path == None: - logger.warn("Unprivileged path is not properly configured. Defaulting to /home/deck/homebrew") - path = "/home/deck/homebrew" # We give up - - return path - - -def get_unprivileged_user() -> str: - user = os.getenv("UNPRIVILEGED_USER") - - if user == None: - # Lets hope we can extract it from the unprivileged dir - dir = os.path.realpath(get_unprivileged_path()) - - pws = sorted(pwd.getpwall(), reverse=True, key=lambda pw: len(pw.pw_dir)) - for pw in pws: - if dir.startswith(os.path.realpath(pw.pw_dir)): - user = pw.pw_name - break - - if user == None: - logger.warn("Unprivileged user is not properly configured. Defaulting to 'deck'") - user = 'deck' - - return user diff --git a/backend/src/localplatform/localplatformwin.py b/backend/src/localplatform/localplatformwin.py deleted file mode 100644 index 212ff2fe..00000000 --- a/backend/src/localplatform/localplatformwin.py +++ /dev/null @@ -1,53 +0,0 @@ -from ..customtypes import UserType -import os, sys - -def chown(path : str, user : UserType = UserType.HOST_USER, recursive : bool = True) -> bool: - return True # Stubbed - -def chmod(path : str, permissions : int, recursive : bool = True) -> bool: - return True # Stubbed - -def folder_owner(path : str) -> UserType|None: - return UserType.HOST_USER # Stubbed - -def get_home_path(user : UserType = UserType.HOST_USER) -> str: - return os.path.expanduser("~") # Mostly stubbed - -def setgid(user : UserType = UserType.HOST_USER): - pass # Stubbed - -def setuid(user : UserType = UserType.HOST_USER): - pass # Stubbed - -async def service_active(service_name : str) -> bool: - return True # Stubbed - -async def service_stop(service_name : str) -> bool: - return True # Stubbed - -async def service_start(service_name : str) -> bool: - return True # Stubbed - -async def service_restart(service_name : str) -> bool: - if service_name == "plugin_loader": - sys.exit(42) - - return True # Stubbed - -def get_username() -> str: - return os.getlogin() - -def get_privileged_path() -> str: - '''On windows, privileged_path is equal to unprivileged_path''' - return get_unprivileged_path() - -def get_unprivileged_path() -> str: - path = os.getenv("UNPRIVILEGED_PATH") - - if path == None: - path = os.getenv("PRIVILEGED_PATH", os.path.join(os.path.expanduser("~"), "homebrew")) - - return path - -def get_unprivileged_user() -> str: - return os.getenv("UNPRIVILEGED_USER", os.getlogin()) diff --git a/backend/src/localplatform/localsocket.py b/backend/src/localplatform/localsocket.py deleted file mode 100644 index 93b1ea18..00000000 --- a/backend/src/localplatform/localsocket.py +++ /dev/null @@ -1,145 +0,0 @@ -import asyncio, time -from typing import Any, Callable, Coroutine -import random - -from .localplatform import ON_WINDOWS - -BUFFER_LIMIT = 2 ** 20 # 1 MiB - -class UnixSocket: - def __init__(self, on_new_message: Callable[[str], Coroutine[Any, Any, Any]]): - ''' - on_new_message takes 1 string argument. - It's return value gets used, if not None, to write data to the socket. 
- Method should be async - ''' - self.socket_addr = f"/tmp/plugin_socket_{time.time()}" - self.on_new_message = on_new_message - self.socket = None - self.reader = None - self.writer = None - self.server_writer = None - - async def setup_server(self): - self.socket = await asyncio.start_unix_server(self._listen_for_method_call, path=self.socket_addr, limit=BUFFER_LIMIT) - - async def _open_socket_if_not_exists(self): - if not self.reader: - retries = 0 - while retries < 10: - try: - self.reader, self.writer = await asyncio.open_unix_connection(self.socket_addr, limit=BUFFER_LIMIT) - return True - except: - await asyncio.sleep(2) - retries += 1 - return False - else: - return True - - async def get_socket_connection(self): - if not await self._open_socket_if_not_exists(): - return None, None - - return self.reader, self.writer - - async def close_socket_connection(self): - if self.writer != None: - self.writer.close() - - self.reader = None - - async def read_single_line(self) -> str|None: - reader, _ = await self.get_socket_connection() - - try: - assert reader - except AssertionError: - return - - return await self._read_single_line(reader) - - async def write_single_line(self, message : str): - _, writer = await self.get_socket_connection() - - try: - assert writer - except AssertionError: - return - - await self._write_single_line(writer, message) - - async def _read_single_line(self, reader: asyncio.StreamReader) -> str: - line = bytearray() - while True: - try: - line.extend(await reader.readuntil()) - except asyncio.LimitOverrunError: - line.extend(await reader.read(reader._limit)) # type: ignore - continue - except asyncio.IncompleteReadError as err: - line.extend(err.partial) - break - else: - break - - return line.decode("utf-8") - - async def _write_single_line(self, writer: asyncio.StreamWriter, message : str): - if not message.endswith("\n"): - message += "\n" - - writer.write(message.encode("utf-8")) - await writer.drain() - - async def write_single_line_server(self, message: str): - if self.server_writer is None: - return - await self._write_single_line(self.server_writer, message) - - async def _listen_for_method_call(self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter): - self.server_writer = writer - while True: - - def _(task: asyncio.Task[str|None]): - res = task.result() - if res is not None: - asyncio.create_task(self._write_single_line(writer, res)) - - line = await self._read_single_line(reader) - asyncio.create_task(self.on_new_message(line)).add_done_callback(_) - -class PortSocket (UnixSocket): - def __init__(self, on_new_message: Callable[[str], Coroutine[Any, Any, Any]]): - ''' - on_new_message takes 1 string argument. - It's return value gets used, if not None, to write data to the socket. 
- Method should be async - ''' - super().__init__(on_new_message) - self.host = "127.0.0.1" - self.port = random.sample(range(40000, 60000), 1)[0] - - async def setup_server(self): - self.socket = await asyncio.start_server(self._listen_for_method_call, host=self.host, port=self.port, limit=BUFFER_LIMIT) - - async def _open_socket_if_not_exists(self): - if not self.reader: - retries = 0 - while retries < 10: - try: - self.reader, self.writer = await asyncio.open_connection(host=self.host, port=self.port, limit=BUFFER_LIMIT) - return True - except: - await asyncio.sleep(2) - retries += 1 - return False - else: - return True - -if ON_WINDOWS: - class LocalSocket (PortSocket): # type: ignore - pass -else: - class LocalSocket (UnixSocket): - pass \ No newline at end of file diff --git a/backend/src/main.py b/backend/src/main.py deleted file mode 100644 index 86c4720d..00000000 --- a/backend/src/main.py +++ /dev/null @@ -1,191 +0,0 @@ -# Change PyInstaller files permissions -import sys -from typing import Dict -from .localplatform.localplatform import (chmod, chown, service_stop, service_start, - ON_WINDOWS, get_log_level, get_live_reload, - get_server_port, get_server_host, get_chown_plugin_path, - get_privileged_path) -if hasattr(sys, '_MEIPASS'): - chmod(sys._MEIPASS, 755) # type: ignore -# Full imports -from asyncio import AbstractEventLoop, new_event_loop, set_event_loop, sleep -from logging import basicConfig, getLogger -from os import path -from traceback import format_exc -import multiprocessing - -import aiohttp_cors # type: ignore -# Partial imports -from aiohttp import client_exceptions -from aiohttp.web import Application, Response, Request, get, run_app, static # type: ignore -from aiohttp_jinja2 import setup as jinja_setup - -# local modules -from .browser import PluginBrowser -from .helpers import (REMOTE_DEBUGGER_UNIT, csrf_middleware, get_csrf_token, - mkdir_as_user, get_system_pythonpaths, get_effective_user_id) - -from .injector import get_gamepadui_tab, Tab, close_old_tabs -from .loader import Loader -from .settings import SettingsManager -from .updater import Updater -from .utilities import Utilities -from .customtypes import UserType - - -basicConfig( - level=get_log_level(), - format="[%(module)s][%(levelname)s]: %(message)s" -) - -logger = getLogger("Main") -plugin_path = path.join(get_privileged_path(), "plugins") - -def chown_plugin_dir(): - if not path.exists(plugin_path): # For safety, create the folder before attempting to do anything with it - mkdir_as_user(plugin_path) - - if not chown(plugin_path, UserType.HOST_USER) or not chmod(plugin_path, 555): - logger.error(f"chown/chmod exited with a non-zero exit code") - -if get_chown_plugin_path() == True: - chown_plugin_dir() - -class PluginManager: - def __init__(self, loop: AbstractEventLoop) -> None: - self.loop = loop - self.web_app = Application() - self.web_app.middlewares.append(csrf_middleware) - self.cors = aiohttp_cors.setup(self.web_app, defaults={ - "https://steamloopback.host": aiohttp_cors.ResourceOptions( - expose_headers="*", - allow_headers="*", - allow_credentials=True - ) - }) - self.plugin_loader = Loader(self, plugin_path, self.loop, get_live_reload()) - self.settings = SettingsManager("loader", path.join(get_privileged_path(), "settings")) - self.plugin_browser = PluginBrowser(plugin_path, self.plugin_loader.plugins, self.plugin_loader, self.settings) - self.utilities = Utilities(self) - self.updater = Updater(self) - - jinja_setup(self.web_app) - - async def startup(_: Application): - if 
self.settings.getSetting("cef_forward", False): - self.loop.create_task(service_start(REMOTE_DEBUGGER_UNIT)) - else: - self.loop.create_task(service_stop(REMOTE_DEBUGGER_UNIT)) - self.loop.create_task(self.loader_reinjector()) - self.loop.create_task(self.load_plugins()) - - self.web_app.on_startup.append(startup) - - self.loop.set_exception_handler(self.exception_handler) - self.web_app.add_routes([get("/auth/token", self.get_auth_token)]) - - for route in list(self.web_app.router.routes()): - self.cors.add(route) # type: ignore - self.web_app.add_routes([static("/static", path.join(path.dirname(__file__), '..', 'static'))]) - - def exception_handler(self, loop: AbstractEventLoop, context: Dict[str, str]): - if context["message"] == "Unclosed connection": - return - loop.default_exception_handler(context) - - async def get_auth_token(self, request: Request): - return Response(text=get_csrf_token()) - - async def load_plugins(self): - # await self.wait_for_server() - logger.debug("Loading plugins") - self.plugin_loader.import_plugins() - # await inject_to_tab("SP", "window.syncDeckyPlugins();") - if self.settings.getSetting("pluginOrder", None) == None: - self.settings.setSetting("pluginOrder", list(self.plugin_loader.plugins.keys())) - logger.debug("Did not find pluginOrder setting, set it to default") - - async def loader_reinjector(self): - while True: - tab = None - nf = False - dc = False - while not tab: - try: - tab = await get_gamepadui_tab() - except (client_exceptions.ClientConnectorError, client_exceptions.ServerDisconnectedError): - if not dc: - logger.debug("Couldn't connect to debugger, waiting...") - dc = True - pass - except ValueError: - if not nf: - logger.debug("Couldn't find GamepadUI tab, waiting...") - nf = True - pass - if not tab: - await sleep(5) - await tab.open_websocket() - await tab.enable() - await self.inject_javascript(tab, True) - try: - async for msg in tab.listen_for_message(): - # this gets spammed a lot - if msg.get("method", None) != "Page.navigatedWithinDocument": - logger.debug("Page event: " + str(msg.get("method", None))) - if msg.get("method", None) == "Page.domContentEventFired": - if not await tab.has_global_var("deckyHasLoaded", False): - await self.inject_javascript(tab) - if msg.get("method", None) == "Inspector.detached": - logger.info("CEF has requested that we detach.") - await tab.close_websocket() - break - # If this is a forceful disconnect the loop will just stop without any failure message. In this case, injector.py will handle this for us so we don't need to close the socket. - # This is because of https://github.com/aio-libs/aiohttp/blob/3ee7091b40a1bc58a8d7846e7878a77640e96996/aiohttp/client_ws.py#L321 - logger.info("CEF has disconnected...") - # At this point the loop starts again and we connect to the freshly started Steam client once it is ready. 
- except Exception: - logger.error("Exception while reading page events " + format_exc()) - await tab.close_websocket() - pass - # while True: - # await sleep(5) - # if not await tab.has_global_var("deckyHasLoaded", False): - # logger.info("Plugin loader isn't present in Steam anymore, reinjecting...") - # await self.inject_javascript(tab) - - async def inject_javascript(self, tab: Tab, first: bool=False, request: Request|None=None): - logger.info("Loading Decky frontend!") - try: - if first: - if await tab.has_global_var("deckyHasLoaded", False): - await close_old_tabs() - await tab.evaluate_js("try{if (window.deckyHasLoaded){setTimeout(() => location.reload(), 100)}else{window.deckyHasLoaded = true;(async()=>{try{while(!window.SP_REACT){await new Promise(r => setTimeout(r, 10))};await import('http://localhost:1337/frontend/index.js')}catch(e){console.error(e)};})();}}catch(e){console.error(e)}", False, False, False) - except: - logger.info("Failed to inject JavaScript into tab\n" + format_exc()) - pass - - def run(self): - return run_app(self.web_app, host=get_server_host(), port=get_server_port(), loop=self.loop, access_log=None) - -def main(): - if ON_WINDOWS: - # Fix windows/flask not recognising that .js means 'application/javascript' - import mimetypes - mimetypes.add_type('application/javascript', '.js') - - # Required for multiprocessing support in frozen files - multiprocessing.freeze_support() - else: - if get_effective_user_id() != 0: - logger.warning(f"decky is running as an unprivileged user, this is not officially supported and may cause issues") - - # Append the loader's plugin path to the recognized python paths - sys.path.append(path.join(path.dirname(__file__), "..", "plugin")) - - # Append the system and user python paths - sys.path.extend(get_system_pythonpaths()) - - loop = new_event_loop() - set_event_loop(loop) - PluginManager(loop).run() diff --git a/backend/src/plugin/method_call_request.py b/backend/src/plugin/method_call_request.py deleted file mode 100644 index cebe34f8..00000000 --- a/backend/src/plugin/method_call_request.py +++ /dev/null @@ -1,29 +0,0 @@ -from typing import Any, TypedDict -from uuid import uuid4 -from asyncio import Event - -class SocketResponseDict(TypedDict): - id: str - success: bool - res: Any - -class MethodCallResponse: - def __init__(self, success: bool, result: Any) -> None: - self.success = success - self.result = result - -class MethodCallRequest: - def __init__(self) -> None: - self.id = str(uuid4()) - self.event = Event() - self.response: MethodCallResponse - - def set_result(self, dc: SocketResponseDict): - self.response = MethodCallResponse(dc["success"], dc["res"]) - self.event.set() - - async def wait_for_result(self): - await self.event.wait() - if not self.response.success: - raise Exception(self.response.result) - return self.response.result \ No newline at end of file diff --git a/backend/src/plugin/plugin.py b/backend/src/plugin/plugin.py deleted file mode 100644 index 6c338106..00000000 --- a/backend/src/plugin/plugin.py +++ /dev/null @@ -1,84 +0,0 @@ -from asyncio import Task, create_task -from json import dumps, load, loads -from logging import getLogger -from os import path -from multiprocessing import Process - -from .sandboxed_plugin import SandboxedPlugin -from .method_call_request import MethodCallRequest -from ..localplatform.localsocket import LocalSocket - -from typing import Any, Callable, Coroutine, Dict - -class PluginWrapper: - def __init__(self, file: str, plugin_directory: str, plugin_path: str) -> None: 
- self.file = file - self.plugin_path = plugin_path - self.plugin_directory = plugin_directory - - self.version = None - - json = load(open(path.join(plugin_path, plugin_directory, "plugin.json"), "r", encoding="utf-8")) - if path.isfile(path.join(plugin_path, plugin_directory, "package.json")): - package_json = load(open(path.join(plugin_path, plugin_directory, "package.json"), "r", encoding="utf-8")) - self.version = package_json["version"] - - self.name = json["name"] - self.author = json["author"] - self.flags = json["flags"] - - self.passive = not path.isfile(self.file) - - self.log = getLogger("plugin") - - self.sandboxed_plugin = SandboxedPlugin(self.name, self.passive, self.flags, self.file, self.plugin_directory, self.plugin_path, self.version, self.author) - #TODO: Maybe make LocalSocket not require on_new_message to make this cleaner - self._socket = LocalSocket(self.sandboxed_plugin.on_new_message) - self._listener_task: Task[Any] - self._method_call_requests: Dict[str, MethodCallRequest] = {} - - self.emitted_message_callback: Callable[[Dict[Any, Any]], Coroutine[Any, Any, Any]] - - def __str__(self) -> str: - return self.name - - async def _response_listener(self): - while True: - try: - line = await self._socket.read_single_line() - if line != None: - res = loads(line) - if res["id"] == "0": - create_task(self.emitted_message_callback(res["payload"])) - else: - self._method_call_requests.pop(res["id"]).set_result(res) - except: - pass - - def set_emitted_message_callback(self, callback: Callable[[Dict[Any, Any]], Coroutine[Any, Any, Any]]): - self.emitted_message_callback = callback - - async def execute_method(self, method_name: str, kwargs: Dict[Any, Any]): - if self.passive: - raise RuntimeError("This plugin is passive (aka does not implement main.py)") - - request = MethodCallRequest() - await self._socket.get_socket_connection() - await self._socket.write_single_line(dumps({ "method": method_name, "args": kwargs, "id": request.id }, ensure_ascii=False)) - self._method_call_requests[request.id] = request - - return await request.wait_for_result() - - def start(self): - if self.passive: - return self - Process(target=self.sandboxed_plugin.initialize, args=[self._socket]).start() - self._listener_task = create_task(self._response_listener()) - return self - - def stop(self): - self._listener_task.cancel() - async def _(self: PluginWrapper): - await self._socket.write_single_line(dumps({ "stop": True }, ensure_ascii=False)) - await self._socket.close_socket_connection() - create_task(_(self)) \ No newline at end of file diff --git a/backend/src/plugin/sandboxed_plugin.py b/backend/src/plugin/sandboxed_plugin.py deleted file mode 100644 index adf9f802..00000000 --- a/backend/src/plugin/sandboxed_plugin.py +++ /dev/null @@ -1,138 +0,0 @@ -from os import path, environ -from signal import SIGINT, signal -from importlib.util import module_from_spec, spec_from_file_location -from json import dumps, loads -from logging import getLogger -from sys import exit, path as syspath, modules as sysmodules -from traceback import format_exc -from asyncio import (get_event_loop, new_event_loop, - set_event_loop, sleep) - -from .method_call_request import SocketResponseDict -from ..localplatform.localsocket import LocalSocket -from ..localplatform.localplatform import setgid, setuid, get_username, get_home_path -from ..customtypes import UserType -from .. 
import helpers - -from typing import Any, Dict, List - -class SandboxedPlugin: - def __init__(self, - name: str, - passive: bool, - flags: List[str], - file: str, - plugin_directory: str, - plugin_path: str, - version: str|None, - author: str) -> None: - self.name = name - self.passive = passive - self.flags = flags - self.file = file - self.plugin_path = plugin_path - self.plugin_directory = plugin_directory - self.version = version - self.author = author - - self.log = getLogger("plugin") - - def initialize(self, socket: LocalSocket): - self._socket = socket - - try: - signal(SIGINT, lambda s, f: exit(0)) - - set_event_loop(new_event_loop()) - if self.passive: - return - setgid(UserType.ROOT if "root" in self.flags else UserType.HOST_USER) - setuid(UserType.ROOT if "root" in self.flags else UserType.HOST_USER) - # export a bunch of environment variables to help plugin developers - environ["HOME"] = get_home_path(UserType.ROOT if "root" in self.flags else UserType.HOST_USER) - environ["USER"] = "root" if "root" in self.flags else get_username() - environ["DECKY_VERSION"] = helpers.get_loader_version() - environ["DECKY_USER"] = get_username() - environ["DECKY_USER_HOME"] = helpers.get_home_path() - environ["DECKY_HOME"] = helpers.get_homebrew_path() - environ["DECKY_PLUGIN_SETTINGS_DIR"] = path.join(environ["DECKY_HOME"], "settings", self.plugin_directory) - helpers.mkdir_as_user(path.join(environ["DECKY_HOME"], "settings")) - helpers.mkdir_as_user(environ["DECKY_PLUGIN_SETTINGS_DIR"]) - environ["DECKY_PLUGIN_RUNTIME_DIR"] = path.join(environ["DECKY_HOME"], "data", self.plugin_directory) - helpers.mkdir_as_user(path.join(environ["DECKY_HOME"], "data")) - helpers.mkdir_as_user(environ["DECKY_PLUGIN_RUNTIME_DIR"]) - environ["DECKY_PLUGIN_LOG_DIR"] = path.join(environ["DECKY_HOME"], "logs", self.plugin_directory) - helpers.mkdir_as_user(path.join(environ["DECKY_HOME"], "logs")) - helpers.mkdir_as_user(environ["DECKY_PLUGIN_LOG_DIR"]) - environ["DECKY_PLUGIN_DIR"] = path.join(self.plugin_path, self.plugin_directory) - environ["DECKY_PLUGIN_NAME"] = self.name - if self.version: - environ["DECKY_PLUGIN_VERSION"] = self.version - environ["DECKY_PLUGIN_AUTHOR"] = self.author - - # append the plugin's `py_modules` to the recognized python paths - syspath.append(path.join(environ["DECKY_PLUGIN_DIR"], "py_modules")) - - #TODO: FIX IN A LESS CURSED WAY - keys = [key.replace("src.", "") for key in sysmodules if key.startswith("src.")] - for key in keys: - sysmodules[key] = sysmodules["src"].__dict__[key] - - spec = spec_from_file_location("_", self.file) - assert spec is not None - module = module_from_spec(spec) - assert spec.loader is not None - spec.loader.exec_module(module) - self.Plugin = module.Plugin - - setattr(self.Plugin, "emit_message", self.emit_message) - #TODO: Find how to put emit_message on global namespace so it doesn't pollute Plugin - - if hasattr(self.Plugin, "_migration"): - get_event_loop().run_until_complete(self.Plugin._migration(self.Plugin)) - if hasattr(self.Plugin, "_main"): - get_event_loop().create_task(self.Plugin._main(self.Plugin)) - get_event_loop().create_task(socket.setup_server()) - get_event_loop().run_forever() - except: - self.log.error("Failed to start " + self.name + "!\n" + format_exc()) - exit(0) - - async def _unload(self): - try: - self.log.info("Attempting to unload with plugin " + self.name + "'s \"_unload\" function.\n") - if hasattr(self.Plugin, "_unload"): - await self.Plugin._unload(self.Plugin) - self.log.info("Unloaded " + self.name + "\n") - else: 
- self.log.info("Could not find \"_unload\" in " + self.name + "'s main.py" + "\n") - except: - self.log.error("Failed to unload " + self.name + "!\n" + format_exc()) - exit(0) - - async def on_new_message(self, message : str) -> str|None: - data = loads(message) - - if "stop" in data: - self.log.info("Calling Loader unload function.") - await self._unload() - get_event_loop().stop() - while get_event_loop().is_running(): - await sleep(0) - get_event_loop().close() - raise Exception("Closing message listener") - - d: SocketResponseDict = {"res": None, "success": True, "id": data["id"]} - try: - d["res"] = await getattr(self.Plugin, data["method"])(self.Plugin, **data["args"]) - except Exception as e: - d["res"] = str(e) - d["success"] = False - finally: - return dumps(d, ensure_ascii=False) - - async def emit_message(self, message: Dict[Any, Any]): - await self._socket.write_single_line_server(dumps({ - "id": "0", - "payload": message - })) \ No newline at end of file diff --git a/backend/src/settings.py b/backend/src/settings.py deleted file mode 100644 index c0f2b90c..00000000 --- a/backend/src/settings.py +++ /dev/null @@ -1,60 +0,0 @@ -from json import dump, load -from os import mkdir, path, listdir, rename -from typing import Any, Dict -from .localplatform.localplatform import chown, folder_owner, get_chown_plugin_path -from .customtypes import UserType - -from .helpers import get_homebrew_path - - -class SettingsManager: - def __init__(self, name: str, settings_directory: str | None = None) -> None: - wrong_dir = get_homebrew_path() - if settings_directory == None: - settings_directory = path.join(wrong_dir, "settings") - - self.path = path.join(settings_directory, name + ".json") - - #Create the folder with the correct permission - if not path.exists(settings_directory): - mkdir(settings_directory) - - #Copy all old settings file in the root directory to the correct folder - for file in listdir(wrong_dir): - if file.endswith(".json"): - rename(path.join(wrong_dir,file), - path.join(settings_directory, file)) - self.path = path.join(settings_directory, name + ".json") - - - #If the owner of the settings directory is not the user, then set it as the user: - expected_user = UserType.HOST_USER if get_chown_plugin_path() else UserType.ROOT - if folder_owner(settings_directory) != expected_user: - chown(settings_directory, expected_user, False) - - self.settings: Dict[str, Any] = {} - - try: - open(self.path, "x", encoding="utf-8") - except FileExistsError as _: - self.read() - pass - - def read(self): - try: - with open(self.path, "r", encoding="utf-8") as file: - self.settings = load(file) - except Exception as e: - print(e) - pass - - def commit(self): - with open(self.path, "w+", encoding="utf-8") as file: - dump(self.settings, file, indent=4, ensure_ascii=False) - - def getSetting(self, key: str, default: Any = None) -> Any: - return self.settings.get(key, default) - - def setSetting(self, key: str, value: Any) -> Any: - self.settings[key] = value - self.commit() diff --git a/backend/src/updater.py b/backend/src/updater.py deleted file mode 100644 index f8aef429..00000000 --- a/backend/src/updater.py +++ /dev/null @@ -1,238 +0,0 @@ -from __future__ import annotations -import os -import shutil -from asyncio import sleep -from json.decoder import JSONDecodeError -from logging import getLogger -from os import getcwd, path, remove -from typing import TYPE_CHECKING, List, TypedDict -if TYPE_CHECKING: - from .main import PluginManager -from .localplatform.localplatform import chmod, 
service_restart, ON_LINUX, get_keep_systemd_service, get_selinux - -from aiohttp import ClientSession, web - -from . import helpers -from .injector import get_gamepadui_tab -from .settings import SettingsManager - -logger = getLogger("Updater") - -class RemoteVerAsset(TypedDict): - name: str - browser_download_url: str -class RemoteVer(TypedDict): - tag_name: str - prerelease: bool - assets: List[RemoteVerAsset] - -class Updater: - def __init__(self, context: PluginManager) -> None: - self.context = context - self.settings = self.context.settings - # Exposes updater methods to frontend - self.updater_methods = { - "get_branch": self._get_branch, - "get_version": self.get_version, - "do_update": self.do_update, - "do_restart": self.do_restart, - "check_for_updates": self.check_for_updates - } - self.remoteVer: RemoteVer | None = None - self.allRemoteVers: List[RemoteVer] = [] - self.localVer = helpers.get_loader_version() - - try: - self.currentBranch = self.get_branch(self.context.settings) - except: - self.currentBranch = 0 - logger.error("Current branch could not be determined, defaulting to \"Stable\"") - - if context: - context.web_app.add_routes([ - web.post("/updater/{method_name}", self._handle_server_method_call) - ]) - context.loop.create_task(self.version_reloader()) - - async def _handle_server_method_call(self, request: web.Request): - method_name = request.match_info["method_name"] - try: - args = await request.json() - except JSONDecodeError: - args = {} - res = {} - try: - r = await self.updater_methods[method_name](**args) # type: ignore - res["result"] = r - res["success"] = True - except Exception as e: - res["result"] = str(e) - res["success"] = False - return web.json_response(res) - - def get_branch(self, manager: SettingsManager): - ver = manager.getSetting("branch", -1) - logger.debug("current branch: %i" % ver) - if ver == -1: - logger.info("Current branch is not set, determining branch from version...") - if self.localVer.startswith("v") and "-pre" in self.localVer: - logger.info("Current version determined to be pre-release") - manager.setSetting('branch', 1) - return 1 - else: - logger.info("Current version determined to be stable") - manager.setSetting('branch', 0) - return 0 - return ver - - async def _get_branch(self, manager: SettingsManager): - return self.get_branch(manager) - - # retrieve relevant service file's url for each branch - def get_service_url(self): - logger.debug("Getting service URL") - branch = self.get_branch(self.context.settings) - match branch: - case 0: - url = "https://raw.githubusercontent.com/SteamDeckHomebrew/decky-loader/main/dist/plugin_loader-release.service" - case 1 | 2: - url = "https://raw.githubusercontent.com/SteamDeckHomebrew/decky-loader/main/dist/plugin_loader-prerelease.service" - case _: - logger.error("You have an invalid branch set... 
Defaulting to prerelease service, please send the logs to the devs!") - url = "https://raw.githubusercontent.com/SteamDeckHomebrew/decky-loader/main/dist/plugin_loader-prerelease.service" - return str(url) - - async def get_version(self): - return { - "current": self.localVer, - "remote": self.remoteVer, - "all": self.allRemoteVers, - "updatable": self.localVer != "unknown" - } - - async def check_for_updates(self): - logger.debug("checking for updates") - selectedBranch = self.get_branch(self.context.settings) - async with ClientSession() as web: - async with web.request("GET", "https://api.github.com/repos/SteamDeckHomebrew/decky-loader/releases", ssl=helpers.get_ssl_context()) as res: - remoteVersions: List[RemoteVer] = await res.json() - if selectedBranch == 0: - logger.debug("release type: release") - remoteVersions = list(filter(lambda ver: ver["tag_name"].startswith("v") and not ver["prerelease"] and not ver["tag_name"].find("-pre") > 0 and ver["tag_name"], remoteVersions)) - elif selectedBranch == 1: - logger.debug("release type: pre-release") - remoteVersions = list(filter(lambda ver:ver["tag_name"].startswith("v"), remoteVersions)) - else: - logger.error("release type: NOT FOUND") - raise ValueError("no valid branch found") - self.allRemoteVers = remoteVersions - logger.debug("determining release type to find, branch is %i" % selectedBranch) - if selectedBranch == 0: - logger.debug("release type: release") - self.remoteVer = next(filter(lambda ver: ver["tag_name"].startswith("v") and not ver["prerelease"] and not ver["tag_name"].find("-pre") > 0 and ver["tag_name"], remoteVersions), None) - elif selectedBranch == 1: - logger.debug("release type: pre-release") - self.remoteVer = next(filter(lambda ver:ver["tag_name"].startswith("v"), remoteVersions), None) - else: - logger.error("release type: NOT FOUND") - raise ValueError("no valid branch found") - logger.info("Updated remote version information") - tab = await get_gamepadui_tab() - await tab.evaluate_js(f"window.DeckyPluginLoader.notifyUpdates()", False, True, False) - return await self.get_version() - - async def version_reloader(self): - await sleep(30) - while True: - try: - await self.check_for_updates() - except: - pass - await sleep(60 * 60 * 6) # 6 hours - - async def do_update(self): - logger.debug("Starting update.") - try: - assert self.remoteVer - except AssertionError: - logger.error("Unable to update as remoteVer is missing") - return - - version = self.remoteVer["tag_name"] - download_url = None - download_filename = "PluginLoader" if ON_LINUX else "PluginLoader.exe" - download_temp_filename = download_filename + ".new" - - for x in self.remoteVer["assets"]: - if x["name"] == download_filename: - download_url = x["browser_download_url"] - break - - if download_url == None: - raise Exception("Download url not found") - - service_url = self.get_service_url() - logger.debug("Retrieved service URL") - - tab = await get_gamepadui_tab() - await tab.open_websocket() - async with ClientSession() as web: - if ON_LINUX and not get_keep_systemd_service(): - logger.debug("Downloading systemd service") - # download the relevant systemd service depending upon branch - async with web.request("GET", service_url, ssl=helpers.get_ssl_context(), allow_redirects=True) as res: - logger.debug("Downloading service file") - data = await res.content.read() - logger.debug(str(data)) - service_file_path = path.join(getcwd(), "plugin_loader.service") - try: - with open(path.join(getcwd(), "plugin_loader.service"), "wb") as out: - 
out.write(data) - except Exception as e: - logger.error(f"Error at %s", exc_info=e) - with open(path.join(getcwd(), "plugin_loader.service"), "r", encoding="utf-8") as service_file: - service_data = service_file.read() - service_data = service_data.replace("${HOMEBREW_FOLDER}", helpers.get_homebrew_path()) - with open(path.join(getcwd(), "plugin_loader.service"), "w", encoding="utf-8") as service_file: - service_file.write(service_data) - - logger.debug("Saved service file") - logger.debug("Copying service file over current file.") - shutil.copy(service_file_path, "/etc/systemd/system/plugin_loader.service") - if not os.path.exists(path.join(getcwd(), ".systemd")): - os.mkdir(path.join(getcwd(), ".systemd")) - shutil.move(service_file_path, path.join(getcwd(), ".systemd")+"/plugin_loader.service") - - logger.debug("Downloading binary") - async with web.request("GET", download_url, ssl=helpers.get_ssl_context(), allow_redirects=True) as res: - total = int(res.headers.get('content-length', 0)) - with open(path.join(getcwd(), download_temp_filename), "wb") as out: - progress = 0 - raw = 0 - async for c in res.content.iter_chunked(512): - out.write(c) - raw += len(c) - new_progress = round((raw / total) * 100) - if progress != new_progress: - self.context.loop.create_task(tab.evaluate_js(f"window.DeckyUpdater.updateProgress({new_progress})", False, False, False)) - progress = new_progress - - with open(path.join(getcwd(), ".loader.version"), "w", encoding="utf-8") as out: - out.write(version) - - if ON_LINUX: - remove(path.join(getcwd(), download_filename)) - shutil.move(path.join(getcwd(), download_temp_filename), path.join(getcwd(), download_filename)) - chmod(path.join(getcwd(), download_filename), 777, False) - if get_selinux(): - from asyncio.subprocess import create_subprocess_exec - process = await create_subprocess_exec("chcon", "-t", "bin_t", path.join(getcwd(), download_filename)) - logger.info(f"Setting the executable flag with chcon returned {await process.wait()}") - - logger.info("Updated loader installation.") - await tab.evaluate_js("window.DeckyUpdater.finish()", False, False) - await self.do_restart() - await tab.close_websocket() - - async def do_restart(self): - await service_restart("plugin_loader") diff --git a/backend/src/utilities.py b/backend/src/utilities.py deleted file mode 100644 index f04ed371..00000000 --- a/backend/src/utilities.py +++ /dev/null @@ -1,373 +0,0 @@ -from __future__ import annotations -from os import stat_result -import uuid -from json.decoder import JSONDecodeError -from os.path import splitext -import re -from traceback import format_exc -from stat import FILE_ATTRIBUTE_HIDDEN # type: ignore - -from asyncio import StreamReader, StreamWriter, start_server, gather, open_connection -from aiohttp import ClientSession, web -from typing import TYPE_CHECKING, Callable, Coroutine, Dict, Any, List, TypedDict - -from logging import getLogger -from pathlib import Path - -from .browser import PluginInstallRequest, PluginInstallType -if TYPE_CHECKING: - from .main import PluginManager -from .injector import inject_to_tab, get_gamepadui_tab, close_old_tabs, get_tab -from .localplatform.localplatform import ON_WINDOWS -from . 
import helpers -from .localplatform.localplatform import service_stop, service_start, get_home_path, get_username - -class FilePickerObj(TypedDict): - file: Path - filest: stat_result - is_dir: bool - -class Utilities: - def __init__(self, context: PluginManager) -> None: - self.context = context - self.util_methods: Dict[str, Callable[..., Coroutine[Any, Any, Any]]] = { - "ping": self.ping, - "http_request": self.http_request, - "install_plugin": self.install_plugin, - "install_plugins": self.install_plugins, - "cancel_plugin_install": self.cancel_plugin_install, - "confirm_plugin_install": self.confirm_plugin_install, - "uninstall_plugin": self.uninstall_plugin, - "execute_in_tab": self.execute_in_tab, - "inject_css_into_tab": self.inject_css_into_tab, - "remove_css_from_tab": self.remove_css_from_tab, - "allow_remote_debugging": self.allow_remote_debugging, - "disallow_remote_debugging": self.disallow_remote_debugging, - "set_setting": self.set_setting, - "get_setting": self.get_setting, - "filepicker_ls": self.filepicker_ls, - "disable_rdt": self.disable_rdt, - "enable_rdt": self.enable_rdt, - "get_tab_id": self.get_tab_id, - "get_user_info": self.get_user_info, - } - - self.logger = getLogger("Utilities") - - self.rdt_proxy_server = None - self.rdt_script_id = None - self.rdt_proxy_task = None - - if context: - context.web_app.add_routes([ - web.post("/methods/{method_name}", self._handle_server_method_call) - ]) - - async def _handle_server_method_call(self, request: web.Request): - method_name = request.match_info["method_name"] - try: - args = await request.json() - except JSONDecodeError: - args = {} - res = {} - try: - r = await self.util_methods[method_name](**args) - res["result"] = r - res["success"] = True - except Exception as e: - res["result"] = str(e) - res["success"] = False - return web.json_response(res) - - async def install_plugin(self, artifact: str="", name: str="No name", version: str="dev", hash: str="", install_type: PluginInstallType=PluginInstallType.INSTALL): - return await self.context.plugin_browser.request_plugin_install( - artifact=artifact, - name=name, - version=version, - hash=hash, - install_type=install_type - ) - - async def install_plugins(self, requests: List[PluginInstallRequest]): - return await self.context.plugin_browser.request_multiple_plugin_installs( - requests=requests - ) - - async def confirm_plugin_install(self, request_id: str): - return await self.context.plugin_browser.confirm_plugin_install(request_id) - - async def cancel_plugin_install(self, request_id: str): - return self.context.plugin_browser.cancel_plugin_install(request_id) - - async def uninstall_plugin(self, name: str): - return await self.context.plugin_browser.uninstall_plugin(name) - - async def http_request(self, method: str="", url: str="", **kwargs: Any): - async with ClientSession() as web: - res = await web.request(method, url, ssl=helpers.get_ssl_context(), **kwargs) - text = await res.text() - return { - "status": res.status, - "headers": dict(res.headers), - "body": text - } - - async def ping(self, **kwargs: Any): - return "pong" - - async def execute_in_tab(self, tab: str, run_async: bool, code: str): - try: - result = await inject_to_tab(tab, code, run_async) - assert result - if "exceptionDetails" in result["result"]: - return { - "success": False, - "result": result["result"] - } - - return { - "success": True, - "result": result["result"]["result"].get("value") - } - except Exception as e: - return { - "success": False, - "result": e - } - - async def 
inject_css_into_tab(self, tab: str, style: str): - try: - css_id = str(uuid.uuid4()) - - result = await inject_to_tab(tab, - f""" - (function() {{ - const style = document.createElement('style'); - style.id = "{css_id}"; - document.head.append(style); - style.textContent = `{style}`; - }})() - """, False) - - if result and "exceptionDetails" in result["result"]: - return { - "success": False, - "result": result["result"] - } - - return { - "success": True, - "result": css_id - } - except Exception as e: - return { - "success": False, - "result": e - } - - async def remove_css_from_tab(self, tab: str, css_id: str): - try: - result = await inject_to_tab(tab, - f""" - (function() {{ - let style = document.getElementById("{css_id}"); - - if (style.nodeName.toLowerCase() == 'style') - style.parentNode.removeChild(style); - }})() - """, False) - - if result and "exceptionDetails" in result["result"]: - return { - "success": False, - "result": result - } - - return { - "success": True - } - except Exception as e: - return { - "success": False, - "result": e - } - - async def get_setting(self, key: str, default: Any): - return self.context.settings.getSetting(key, default) - - async def set_setting(self, key: str, value: Any): - return self.context.settings.setSetting(key, value) - - async def allow_remote_debugging(self): - await service_start(helpers.REMOTE_DEBUGGER_UNIT) - return True - - async def disallow_remote_debugging(self): - await service_stop(helpers.REMOTE_DEBUGGER_UNIT) - return True - - async def filepicker_ls(self, - path : str | None = None, - include_files: bool = True, - include_folders: bool = True, - include_ext: list[str] = [], - include_hidden: bool = False, - order_by: str = "name_asc", - filter_for: str | None = None, - page: int = 1, - max: int = 1000): - - if path == None: - path = get_home_path() - - path_obj = Path(path).resolve() - - files: List[FilePickerObj] = [] - folders: List[FilePickerObj] = [] - - #Resolving all files/folders in the requested directory - for file in path_obj.iterdir(): - if file.exists(): - filest = file.stat() - is_hidden = file.name.startswith('.') - if ON_WINDOWS and not is_hidden: - is_hidden = bool(filest.st_file_attributes & FILE_ATTRIBUTE_HIDDEN) # type: ignore - if include_folders and file.is_dir(): - if (is_hidden and include_hidden) or not is_hidden: - folders.append({"file": file, "filest": filest, "is_dir": True}) - elif include_files: - # Handle requested extensions if present - if len(include_ext) == 0 or 'all_files' in include_ext \ - or splitext(file.name)[1].lstrip('.') in include_ext: - if (is_hidden and include_hidden) or not is_hidden: - files.append({"file": file, "filest": filest, "is_dir": False}) - # Filter logic - if filter_for is not None: - try: - if re.compile(filter_for): - files = list(filter(lambda file: re.search(filter_for, file["file"].name) != None, files)) - except re.error: - files = list(filter(lambda file: file["file"].name.find(filter_for) != -1, files)) - - # Ordering logic - ord_arg = order_by.split("_") - ord = ord_arg[0] - rev = True if ord_arg[1] == "asc" else False - match ord: - case 'name': - files.sort(key=lambda x: x['file'].name.casefold(), reverse = rev) - folders.sort(key=lambda x: x['file'].name.casefold(), reverse = rev) - case 'modified': - files.sort(key=lambda x: x['filest'].st_mtime, reverse = not rev) - folders.sort(key=lambda x: x['filest'].st_mtime, reverse = not rev) - case 'created': - files.sort(key=lambda x: x['filest'].st_ctime, reverse = not rev) - folders.sort(key=lambda x: 
x['filest'].st_ctime, reverse = not rev) - case 'size': - files.sort(key=lambda x: x['filest'].st_size, reverse = not rev) - # Folders has no file size, order by name instead - folders.sort(key=lambda x: x['file'].name.casefold()) - case _: - files.sort(key=lambda x: x['file'].name.casefold(), reverse = rev) - folders.sort(key=lambda x: x['file'].name.casefold(), reverse = rev) - - #Constructing the final file list, folders first - all = [{ - "isdir": x['is_dir'], - "name": str(x['file'].name), - "realpath": str(x['file']), - "size": x['filest'].st_size, - "modified": x['filest'].st_mtime, - "created": x['filest'].st_ctime, - } for x in folders + files ] - - return { - "realpath": str(path), - "files": all[(page-1)*max:(page)*max], - "total": len(all), - } - - - # Based on https://stackoverflow.com/a/46422554/13174603 - def start_rdt_proxy(self, ip: str, port: int): - async def pipe(reader: StreamReader, writer: StreamWriter): - try: - while not reader.at_eof(): - writer.write(await reader.read(2048)) - finally: - writer.close() - async def handle_client(local_reader: StreamReader, local_writer: StreamWriter): - try: - remote_reader, remote_writer = await open_connection( - ip, port) - pipe1 = pipe(local_reader, remote_writer) - pipe2 = pipe(remote_reader, local_writer) - await gather(pipe1, pipe2) - finally: - local_writer.close() - - self.rdt_proxy_server = start_server(handle_client, "127.0.0.1", port) - self.rdt_proxy_task = self.context.loop.create_task(self.rdt_proxy_server) - - def stop_rdt_proxy(self): - if self.rdt_proxy_server != None: - self.rdt_proxy_server.close() - if self.rdt_proxy_task: - self.rdt_proxy_task.cancel() - - async def _enable_rdt(self): - # TODO un-hardcode port - try: - self.stop_rdt_proxy() - ip = self.context.settings.getSetting("developer.rdt.ip", None) - - if ip != None: - self.logger.info("Connecting to React DevTools at " + ip) - async with ClientSession() as web: - res = await web.request("GET", "http://" + ip + ":8097", ssl=helpers.get_ssl_context()) - script = """ - if (!window.deckyHasConnectedRDT) { - window.deckyHasConnectedRDT = true; - // This fixes the overlay when hovering over an element in RDT - Object.defineProperty(window, '__REACT_DEVTOOLS_TARGET_WINDOW__', { - enumerable: true, - configurable: true, - get: function() { - return (GamepadNavTree?.m_context?.m_controller || FocusNavController)?.m_ActiveContext?.ActiveWindow || window; - } - }); - """ + await res.text() + "\n}" - if res.status != 200: - self.logger.error("Failed to connect to React DevTools at " + ip) - return False - self.start_rdt_proxy(ip, 8097) - self.logger.info("Connected to React DevTools, loading script") - tab = await get_gamepadui_tab() - # RDT needs to load before React itself to work. - await close_old_tabs() - result = await tab.reload_and_evaluate(script) - self.logger.info(result) - - except Exception: - self.logger.error("Failed to connect to React DevTools") - self.logger.error(format_exc()) - - async def enable_rdt(self): - self.context.loop.create_task(self._enable_rdt()) - - async def disable_rdt(self): - self.logger.info("Disabling React DevTools") - tab = await get_gamepadui_tab() - self.rdt_script_id = None - await close_old_tabs() - await tab.evaluate_js("location.reload();", False, True, False) - self.logger.info("React DevTools disabled") - - async def get_user_info(self) -> Dict[str, str]: - return { - "username": get_username(), - "path": get_home_path() - } - - async def get_tab_id(self, name: str): - return (await get_tab(name)).id -- cgit v1.2.3