diff --git a/pythonFiles/lib/jedilsp/Misc/NEWS.d/next/Library/2021-05-14-16-06-02.bpo-44095.v_pLwY.rst b/pythonFiles/lib/jedilsp/Misc/NEWS.d/next/Library/2021-05-14-16-06-02.bpo-44095.v_pLwY.rst new file mode 100644 index 0000000000000..ee03e933f35d6 --- /dev/null +++ b/pythonFiles/lib/jedilsp/Misc/NEWS.d/next/Library/2021-05-14-16-06-02.bpo-44095.v_pLwY.rst @@ -0,0 +1,2 @@ +:class:`zipfile.Path` now supports :attr:`zipfile.Path.stem`, +:attr:`zipfile.Path.suffixes`, and :attr:`zipfile.Path.suffix` attributes. diff --git a/pythonFiles/lib/jedilsp/README.md b/pythonFiles/lib/jedilsp/README.md new file mode 100644 index 0000000000000..9274720d9ea8b --- /dev/null +++ b/pythonFiles/lib/jedilsp/README.md @@ -0,0 +1,268 @@ +# jedi-language-server + +[![image-version](https://img.shields.io/pypi/v/jedi-language-server.svg)](https://python.org/pypi/jedi-language-server) +[![image-license](https://img.shields.io/pypi/l/jedi-language-server.svg)](https://python.org/pypi/jedi-language-server) +[![image-python-versions](https://img.shields.io/badge/python->=3.7-blue)](https://python.org/pypi/jedi-language-server) +[![image-pypi-downloads](https://pepy.tech/badge/jedi-language-server)](https://pepy.tech/project/jedi-language-server) +[![github-action-testing](https://github.com/pappasam/jedi-language-server/actions/workflows/testing.yaml/badge.svg)](https://github.com/pappasam/jedi-language-server/actions/workflows/testing.yaml) + +A [Language Server](https://microsoft.github.io/language-server-protocol/) for the latest version(s) of [Jedi](https://jedi.readthedocs.io/en/latest/). If using Neovim/Vim, we recommend using with [coc-jedi](https://github.com/pappasam/coc-jedi). Supports Python versions 3.7 and newer. + +**Note:** this tool is actively used by its primary author. He's happy to review pull requests / respond to issues you may discover. + +## Installation + +Some frameworks, like coc-jedi and vscode-python, will install and manage jedi-language-server for you. 
If you're setting up manually, you can run the following from your command line (bash / zsh): + +```bash +pip install -U jedi-language-server +``` + +Alternatively (and preferably), use [pipx](https://github.com/pipxproject/pipx) to keep jedi-language-server and its dependencies isolated from your other Python dependencies. Don't worry, jedi is smart enough to figure out which Virtual environment you're currently using! + +## Capabilities + +jedi-language-server aims to support Jedi's capabilities and expose them through the Language Server Protocol. It supports the following Language Server capabilities: + +### Language Features + +- [completionItem/resolve](https://microsoft.github.io/language-server-protocol/specification#completionItem_resolve) +- [textDocument/codeAction](https://microsoft.github.io/language-server-protocol/specification#textDocument_codeAction) (refactor.inline, refactor.extract) +- [textDocument/completion](https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_completion) +- [textDocument/definition](https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_definition) +- [textDocument/documentHighlight](https://microsoft.github.io/language-server-protocol/specification#textDocument_documentHighlight) +- [textDocument/documentSymbol](https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_documentSymbol) +- [textDocument/typeDefinition](https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_typeDefinition) +- [textDocument/hover](https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_hover) +- [textDocument/publishDiagnostics](https://microsoft.github.io/language-server-protocol/specification#textDocument_publishDiagnostics) +- 
[textDocument/references](https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_references) +- [textDocument/rename](https://microsoft.github.io/language-server-protocol/specifications/specification-current/#textDocument_rename) +- [textDocument/signatureHelp](https://microsoft.github.io/language-server-protocol/specification#textDocument_signatureHelp) +- [workspace/symbol](https://microsoft.github.io/language-server-protocol/specifications/specification-current/#workspace_symbol) + +### Text Synchronization (for diagnostics) + +- [textDocument/didChange](https://microsoft.github.io/language-server-protocol/specification#textDocument_didChange) +- [textDocument/didOpen](https://microsoft.github.io/language-server-protocol/specification#textDocument_didOpen) +- [textDocument/didSave](https://microsoft.github.io/language-server-protocol/specification#textDocument_didSave) + +## Editor Setup + +The following instructions show how to use jedi-language-server with your development tooling. The instructions assume you have already installed jedi-language-server. + +### Vim / Neovim + +Users may choose 1 of the following options: + +- [coc.nvim](https://github.com/neoclide/coc.nvim) with [coc-jedi](https://github.com/pappasam/coc-jedi). +- [ALE](https://github.com/dense-analysis/ale). +- [Neovim's native LSP client](https://neovim.io/doc/user/lsp.html). See [here](https://github.com/neovim/nvim-lspconfig/blob/master/doc/server_configurations.md#jedi_language_server) for an example configuration. +- [vim-lsp](https://github.com/prabirshrestha/vim-lsp). + +Note: this list is non-exhaustive. If you know of a great choice not included in this list, please submit a PR! + +### Emacs + +Users may choose one of the following options: + +- [lsp-jedi](https://github.com/fredcamps/lsp-jedi). +- [eglot](https://github.com/joaotavora/eglot) + +Note: this list is non-exhaustive. 
If you know of a great choice not included in this list, please submit a PR! + +### Visual Studio Code (vscode) + +Starting from the [October 2021 release](https://github.com/microsoft/vscode-python/releases/tag/2021.10.1317843341), set the `python.languageServer` setting to `Jedi` to use jedi-language-server. + +Note: This does not support Python 2.7. + +See: + +## Command line + +jedi-language-server can be run directly from the command line. + +```console +$ jedi-language-server --help +usage: jedi-language-server [-h] [--version] [--tcp] [--ws] [--host HOST] [--port PORT] [--log-file LOG_FILE] [-v] + +Jedi language server: an LSP wrapper for jedi. + +optional arguments: + -h, --help show this help message and exit + --version display version information and exit + --tcp use TCP web server instead of stdio + --ws use web socket server instead of stdio + --host HOST host for web server (default 127.0.0.1) + --port PORT port for web server (default 2087) + --log-file LOG_FILE redirect logs to file specified + -v, --verbose increase verbosity of log output + +Examples: + + Run over stdio : jedi-language-server + Run over tcp : jedi-language-server --tcp + Run over websockets: + # only need to pip install once per env + pip install pygls[ws] + jedi-language-server --ws + +Notes: + + For use with web sockets, user must first run + 'pip install pygls[ws]' to install the correct + version of the websockets library. +``` + +If testing sending requests over stdio manually from the command line, you must include Windows-style line endings: `\r\n` . 
For an example, from within this project, run the following: + +```console +$ jedi-language-server < ./example-initialization-request.txt +INFO:pygls.server:Starting IO server +INFO:pygls.feature_manager:Registered "textDocument/didOpen" with options "None" +INFO:pygls.feature_manager:Registered "textDocument/didChange" with options "None" +INFO:pygls.feature_manager:Registered "textDocument/didSave" with options "None" +INFO:pygls.feature_manager:Registered "textDocument/hover" with options "None" +INFO:pygls.protocol:Language server initialized work_done_token=None process_id=None root_uri='file:///home/ubuntu/artifacts/' capabilities=ClientCapabilities(workspace=WorkspaceClientCapabilities(apply_edit=None, workspace_edit=None, did_change_configuration=DidChangeConfigurationClientCapabilities(dynamic_registration=True), did_change_watched_files=None, symbol=None, execute_command=None, workspace_folders=None, configuration=None, semantic_tokens=None, code_lens=None, file_operations=None), text_document=TextDocumentClientCapabilities(synchronization=TextDocumentSyncClientCapabilities(dynamic_registration=True, will_save=False, will_save_wait_until=False, did_save=False), completion=CompletionClientCapabilities(dynamic_registration=True, completion_item=CompletionItemClientCapabilities(snippet_support=False, commit_characters_support=True, documentation_format=[, ], deprecated_support=False, preselect_support=False, tag_support=None, insert_replace_support=None, resolve_support=None, insert_text_mode_support=None), completion_item_kind=None, context_support=False), hover=HoverClientCapabilities(dynamic_registration=True, content_format=[, ]), signature_help=SignatureHelpClientCapabilities(dynamic_registration=True, signature_information=SignatureHelpInformationClientCapabilities(documentation_format=[, ], parameter_information=None, active_parameter_support=None), context_support=None), declaration=DeclarationClientCapabilities(dynamic_registration=True, 
link_support=True), definition=DefinitionClientCapabilities(dynamic_registration=True, link_support=True), type_definition=TypeDefinitionClientCapabilities(dynamic_registration=True, link_support=True), implementation=ImplementationClientCapabilities(dynamic_registration=True, link_support=True), references=None, document_highlight=None, document_symbol=None, code_action=None, code_lens=None, document_link=None, color_provider=None, formatting=None, range_formatting=None, on_type_formatting=None, rename=None, publish_diagnostics=None, folding_range=None, selection_range=None, linked_editing_range=None, call_hierarchy=None, semantic_tokens=None, moniker=None), window=None, general=None, experimental=None) client_info=None locale=None root_path=None initialization_options=None trace=None workspace_folders=None +INFO:pygls.protocol:Sending data: {"jsonrpc": "2.0", "id": 0, "result": {"capabilities": {"textDocumentSync": {"openClose": true, "change": 2, "willSave": false, "willSaveWaitUntil": false, "save": true}, "completionProvider": {"triggerCharacters": [".", "'", "\""], "resolveProvider": true}, "hoverProvider": true, "signatureHelpProvider": {"triggerCharacters": ["(", ","]}, "definitionProvider": true, "referencesProvider": true, "documentHighlightProvider": true, "documentSymbolProvider": true, "codeActionProvider": {"codeActionKinds": ["refactor.inline", "refactor.extract"]}, "renameProvider": true, "executeCommandProvider": {"commands": []}, "workspaceSymbolProvider": true, "workspace": {"workspaceFolders": {"supported": true, "changeNotifications": true}, "fileOperations": {}}}}} +Content-Length: 758 +Content-Type: application/vscode-jsonrpc; charset=utf-8 + +{"jsonrpc": "2.0", "id": 0, "result": {"capabilities": {"textDocumentSync": {"openClose": true, "change": 2, "willSave": false, "willSaveWaitUntil": false, "save": true}, "completionProvider": {"triggerCharacters": [".", "'", "\""], "resolveProvider": true}, "hoverProvider": true, 
"signatureHelpProvider": {"triggerCharacters": ["(", ","]}, "definitionProvider": true, "referencesProvider": true, "documentHighlightProvider": true, "documentSymbolProvider": true, "codeActionProvider": {"codeActionKinds": ["refactor.inline", "refactor.extract"]}, "renameProvider": true, "executeCommandProvider": {"commands": []}, "workspaceSymbolProvider": true, "workspace": {"workspaceFolders": {"supported": true, "changeNotifications": true}, "fileOperations": {}}}}}INFO:pygls.server:Shutting down the server +INFO:pygls.server:Closing the event loop. +``` + +If testing interactively, be sure to manually insert carriage returns. Although this may differ between shell environments, within most bash terminals, you can explicitly insert the required line endings by typing ``, which will insert a `^M`. See: + +```console +$ jedi-language-server 2>logs +Content-Length: 1062^M +^M +{"jsonrpc":"2.0","id":0,"method":"initialize","params":{"capabilities":{"textDocument":{"hover":{"dynamicRegistration":true,"contentFormat":["plaintext","markdown"]},"synchronization":{"dynamicRegistration":true,"willSave":false,"didSave":false,"willSaveWaitUntil":false},"completion":{"dynamicRegistration":true,"completionItem":{"snippetSupport":false,"commitCharactersSupport":true,"documentationFormat":["plaintext","markdown"],"deprecatedSupport":false,"preselectSupport":false},"contextSupport":false},"signatureHelp":{"dynamicRegistration":true,"signatureInformation":{"documentationFormat":["plaintext","markdown"]}},"declaration":{"dynamicRegistration":true,"linkSupport":true},"definition":{"dynamicRegistration":true,"linkSupport":true},"typeDefinition":{"dynamicRegistration":true,"linkSupport":true},"implementation":{"dynamicRegistration":true,"linkSupport":true}},"workspace":{"didChangeConfiguration":{"dynamicRegistration":true}}},"initializationOptions":null,"processId":null,"rootUri":"file:///home/ubuntu/artifacts/","workspaceFolders":null}}^M +Content-Length: 758 +Content-Type: 
application/vscode-jsonrpc; charset=utf-8 + +{"jsonrpc": "2.0", "id": 0, "result": {"capabilities": {"textDocumentSync": {"openClose": true, "change": 2, "willSave": false, "willSaveWaitUntil": false, "save": true}, "completionProvider": {"triggerCharacters": [".", "'", "\""], "resolveProvider": true}, "hoverProvider": true, "signatureHelpProvider": {"triggerCharacters": ["(", ","]}, "definitionProvider": true, "referencesProvider": true, "documentHighlightProvider": true, "documentSymbolProvider": true, "codeActionProvider": {"codeActionKinds": ["refactor.inline", "refactor.extract"]}, "renameProvider": true, "executeCommandProvider": {"commands": []}, "workspaceSymbolProvider": true, "workspace": {"workspaceFolders": {"supported": true, "changeNotifications": true}, "fileOperations": {}}}}} +``` + +## Configuration + +We recommend using [coc-jedi](https://github.com/pappasam/coc-jedi) and following its [configuration instructions](https://github.com/pappasam/coc-jedi#configuration). + +If you are configuring manually, jedi-language-server supports the following [initializationOptions](https://microsoft.github.io/language-server-protocol/specification#initialize): + +```json +{ + "initializationOptions": { + "codeAction": { + "nameExtractVariable": "jls_extract_var", + "nameExtractFunction": "jls_extract_def" + }, + "completion": { + "disableSnippets": false, + "resolveEagerly": false, + "ignorePatterns": [] + }, + "diagnostics": { + "enable": false, + "didOpen": true, + "didChange": true, + "didSave": true + }, + "hover": { + "enable": true, + "disable": { + "class": { "all": false, "names": [], "fullNames": [] }, + "function": { "all": false, "names": [], "fullNames": [] }, + "instance": { "all": false, "names": [], "fullNames": [] }, + "keyword": { "all": false, "names": [], "fullNames": [] }, + "module": { "all": false, "names": [], "fullNames": [] }, + "param": { "all": false, "names": [], "fullNames": [] }, + "path": { "all": false, "names": [], "fullNames": 
[] }, + "property": { "all": false, "names": [], "fullNames": [] }, + "statement": { "all": false, "names": [], "fullNames": [] } + } + }, + "jediSettings": { + "autoImportModules": [], + "caseInsensitiveCompletion": true, + "debug": false + }, + "markupKindPreferred": "markdown", + "workspace": { + "extraPaths": [], + "environmentPath": "/path/to/venv/bin/python", + "symbols": { + "ignoreFolders": [".nox", ".tox", ".venv", "__pycache__", "venv"], + "maxSymbols": 20 + } + } + } +} +``` + +See coc-jedi's [configuration instructions](https://github.com/pappasam/coc-jedi#configuration) for an explanation of the above configurations. + +## Diagnostics + +Diagnostics are provided by Python's built-in `compile` function. + +If you would like diagnostics (from [pylint](https://github.com/PyCQA/pylint), [mypy](https://github.com/python/mypy), etc.), we recommend using the powerful [diagnostic-language-server](https://github.com/iamcco/diagnostic-languageserver). + +## Code Formatting + +Again, we recommend that you use [diagnostic-language-server](https://github.com/iamcco/diagnostic-languageserver). It also supports code formatting. + +## Local Development + +To build and run this project from source: + +### Dependencies + +Install the following tools manually: + +- [Poetry](https://github.com/sdispater/poetry#installation) +- [GNU Make](https://www.gnu.org/software/make/) + +#### Recommended + +- [asdf](https://github.com/asdf-vm/asdf) + +### Get source code + +[Fork](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) this repository and clone the fork to your development machine: + +```bash +git clone https://github.com//jedi-language-server +cd jedi-language-server +``` + +### Set up development environment + +```bash +make setup +``` + +### Run tests + +```bash +make test +``` + +## Inspiration + +Palantir's [python-language-server](https://github.com/palantir/python-language-server) inspired this project. 
In fact, for consistency's sake, many of python-language-server's CLI options are used as-is in jedi-language-server. + +Unlike python-language-server, jedi-language-server: + +- Uses [pygls](https://github.com/openlawlibrary/pygls) instead of creating its own low-level Language Server Protocol bindings +- Supports one powerful 3rd party static analysis / completion / refactoring library: Jedi. By only supporting Jedi, we can focus on supporting all Jedi features without exposing ourselves to too many broken 3rd party dependencies (I'm looking at you, [rope](https://github.com/python-rope/rope)). +- Is supremely simple because of its scope constraints. Leave complexity to the Jedi [master](https://github.com/davidhalter). If the force is strong with you, please submit a PR! + +## Articles + +- [Python in VS Code Improves Jedi Language Server Support](https://visualstudiomagazine.com/articles/2021/03/17/vscode-jedi.aspx) + +## Written by + +[Samuel Roeca](https://samroeca.com/) diff --git a/pythonFiles/lib/jedilsp/attr/__init__.py b/pythonFiles/lib/jedilsp/attr/__init__.py new file mode 100644 index 0000000000000..04243782302a8 --- /dev/null +++ b/pythonFiles/lib/jedilsp/attr/__init__.py @@ -0,0 +1,93 @@ +# SPDX-License-Identifier: MIT + +import sys +import warnings + +from functools import partial + +from . import converters, exceptions, filters, setters, validators +from ._cmp import cmp_using +from ._config import get_run_validators, set_run_validators +from ._funcs import asdict, assoc, astuple, evolve, has, resolve_types +from ._make import ( + NOTHING, + Attribute, + Factory, + attrib, + attrs, + fields, + fields_dict, + make_class, + validate, +) +from ._next_gen import define, field, frozen, mutable +from ._version_info import VersionInfo + + +if sys.version_info < (3, 7): # pragma: no cover + warnings.warn( + "Running attrs on Python 3.6 is deprecated & we intend to drop " + "support soon. 
If that's a problem for you, please let us know why & " + "we MAY re-evaluate: ", + DeprecationWarning, + ) + +__version__ = "22.2.0" +__version_info__ = VersionInfo._from_version_string(__version__) + +__title__ = "attrs" +__description__ = "Classes Without Boilerplate" +__url__ = "https://www.attrs.org/" +__uri__ = __url__ +__doc__ = __description__ + " <" + __uri__ + ">" + +__author__ = "Hynek Schlawack" +__email__ = "hs@ox.cx" + +__license__ = "MIT" +__copyright__ = "Copyright (c) 2015 Hynek Schlawack" + + +s = attributes = attrs +ib = attr = attrib +dataclass = partial(attrs, auto_attribs=True) # happy Easter ;) + + +class AttrsInstance: + pass + + +__all__ = [ + "Attribute", + "AttrsInstance", + "Factory", + "NOTHING", + "asdict", + "assoc", + "astuple", + "attr", + "attrib", + "attributes", + "attrs", + "cmp_using", + "converters", + "define", + "evolve", + "exceptions", + "field", + "fields", + "fields_dict", + "filters", + "frozen", + "get_run_validators", + "has", + "ib", + "make_class", + "mutable", + "resolve_types", + "s", + "set_run_validators", + "setters", + "validate", + "validators", +] diff --git a/pythonFiles/lib/jedilsp/attr/__init__.pyi b/pythonFiles/lib/jedilsp/attr/__init__.pyi new file mode 100644 index 0000000000000..42a2ee2cc68cd --- /dev/null +++ b/pythonFiles/lib/jedilsp/attr/__init__.pyi @@ -0,0 +1,509 @@ +import enum +import sys + +from typing import ( + Any, + Callable, + Dict, + Generic, + List, + Mapping, + Optional, + Protocol, + Sequence, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +# `import X as X` is required to make these public +from . import converters as converters +from . import exceptions as exceptions +from . import filters as filters +from . import setters as setters +from . 
import validators as validators +from ._cmp import cmp_using as cmp_using +from ._typing_compat import AttrsInstance_ +from ._version_info import VersionInfo + +if sys.version_info >= (3, 10): + from typing import TypeGuard +else: + from typing_extensions import TypeGuard + +__version__: str +__version_info__: VersionInfo +__title__: str +__description__: str +__url__: str +__uri__: str +__author__: str +__email__: str +__license__: str +__copyright__: str + +_T = TypeVar("_T") +_C = TypeVar("_C", bound=type) + +_EqOrderType = Union[bool, Callable[[Any], Any]] +_ValidatorType = Callable[[Any, "Attribute[_T]", _T], Any] +_ConverterType = Callable[[Any], Any] +_FilterType = Callable[["Attribute[_T]", _T], bool] +_ReprType = Callable[[Any], str] +_ReprArgType = Union[bool, _ReprType] +_OnSetAttrType = Callable[[Any, "Attribute[Any]", Any], Any] +_OnSetAttrArgType = Union[ + _OnSetAttrType, List[_OnSetAttrType], setters._NoOpType +] +_FieldTransformer = Callable[ + [type, List["Attribute[Any]"]], List["Attribute[Any]"] +] +# FIXME: in reality, if multiple validators are passed they must be in a list +# or tuple, but those are invariant and so would prevent subtypes of +# _ValidatorType from working when passed in a list or tuple. +_ValidatorArgType = Union[_ValidatorType[_T], Sequence[_ValidatorType[_T]]] + +# We subclass this here to keep the protocol's qualified name clean. +class AttrsInstance(AttrsInstance_, Protocol): + pass + +# _make -- + +class _Nothing(enum.Enum): + NOTHING = enum.auto() + +NOTHING = _Nothing.NOTHING + +# NOTE: Factory lies about its return type to make this possible: +# `x: List[int] # = Factory(list)` +# Work around mypy issue #4554 in the common case by using an overload. +if sys.version_info >= (3, 8): + from typing import Literal + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Callable[[Any], _T], + takes_self: Literal[True], + ) -> _T: ... 
+ @overload + def Factory( + factory: Callable[[], _T], + takes_self: Literal[False], + ) -> _T: ... + +else: + @overload + def Factory(factory: Callable[[], _T]) -> _T: ... + @overload + def Factory( + factory: Union[Callable[[Any], _T], Callable[[], _T]], + takes_self: bool = ..., + ) -> _T: ... + +# Static type inference support via __dataclass_transform__ implemented as per: +# https://github.com/microsoft/pyright/blob/1.1.135/specs/dataclass_transforms.md +# This annotation must be applied to all overloads of "define" and "attrs" +# +# NOTE: This is a typing construct and does not exist at runtime. Extensions +# wrapping attrs decorators should declare a separate __dataclass_transform__ +# signature in the extension module using the specification linked above to +# provide pyright support. +def __dataclass_transform__( + *, + eq_default: bool = True, + order_default: bool = False, + kw_only_default: bool = False, + field_descriptors: Tuple[Union[type, Callable[..., Any]], ...] = (()), +) -> Callable[[_T], _T]: ... + +class Attribute(Generic[_T]): + name: str + default: Optional[_T] + validator: Optional[_ValidatorType[_T]] + repr: _ReprArgType + cmp: _EqOrderType + eq: _EqOrderType + order: _EqOrderType + hash: Optional[bool] + init: bool + converter: Optional[_ConverterType] + metadata: Dict[Any, Any] + type: Optional[Type[_T]] + kw_only: bool + on_setattr: _OnSetAttrType + alias: Optional[str] + + def evolve(self, **changes: Any) -> "Attribute[Any]": ... + +# NOTE: We had several choices for the annotation to use for type arg: +# 1) Type[_T] +# - Pros: Handles simple cases correctly +# - Cons: Might produce less informative errors in the case of conflicting +# TypeVars e.g. `attr.ib(default='bad', type=int)` +# 2) Callable[..., _T] +# - Pros: Better error messages than #1 for conflicting TypeVars +# - Cons: Terrible error messages for validator checks. +# e.g. 
attr.ib(type=int, validator=validate_str) +# -> error: Cannot infer function type argument +# 3) type (and do all of the work in the mypy plugin) +# - Pros: Simple here, and we could customize the plugin with our own errors. +# - Cons: Would need to write mypy plugin code to handle all the cases. +# We chose option #1. + +# `attr` lies about its return type to make the following possible: +# attr() -> Any +# attr(8) -> int +# attr(validator=) -> Whatever the callable expects. +# This makes this type of assignments possible: +# x: int = attr(8) +# +# This form catches explicit None or no default but with no other arguments +# returns Any. +@overload +def attrib( + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: None = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. +@overload +def attrib( + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> _T: ... + +# This form catches an explicit default argument. 
+@overload +def attrib( + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: Optional[Type[_T]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. forward references (str), Any +@overload +def attrib( + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + type: object = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> Any: ... +@overload +def field( + *, + default: None = ..., + validator: None = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: None = ..., + factory: None = ..., + kw_only: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> Any: ... + +# This form catches an explicit None or no default and infers the type from the +# other arguments. 
+@overload +def field( + *, + default: None = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> _T: ... + +# This form catches an explicit default argument. +@overload +def field( + *, + default: _T, + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> _T: ... + +# This form covers type=non-Type: e.g. forward references (str), Any +@overload +def field( + *, + default: Optional[_T] = ..., + validator: Optional[_ValidatorArgType[_T]] = ..., + repr: _ReprArgType = ..., + hash: Optional[bool] = ..., + init: bool = ..., + metadata: Optional[Mapping[Any, Any]] = ..., + converter: Optional[_ConverterType] = ..., + factory: Optional[Callable[[], _T]] = ..., + kw_only: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + alias: Optional[str] = ..., +) -> Any: ... 
+@overload +@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field)) +def attrs( + maybe_cls: _C, + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., + unsafe_hash: Optional[bool] = ..., +) -> _C: ... +@overload +@__dataclass_transform__(order_default=True, field_descriptors=(attrib, field)) +def attrs( + maybe_cls: None = ..., + these: Optional[Dict[str, Any]] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + auto_detect: bool = ..., + collect_by_mro: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., + unsafe_hash: Optional[bool] = ..., +) -> Callable[[_C], _C]: ... 
+@overload +@__dataclass_transform__(field_descriptors=(attrib, field)) +def define( + maybe_cls: _C, + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + unsafe_hash: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> _C: ... +@overload +@__dataclass_transform__(field_descriptors=(attrib, field)) +def define( + maybe_cls: None = ..., + *, + these: Optional[Dict[str, Any]] = ..., + repr: bool = ..., + unsafe_hash: Optional[bool] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[bool] = ..., + order: Optional[bool] = ..., + auto_detect: bool = ..., + getstate_setstate: Optional[bool] = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., + match_args: bool = ..., +) -> Callable[[_C], _C]: ... + +mutable = define +frozen = define # they differ only in their defaults + +def fields(cls: Type[AttrsInstance]) -> Any: ... +def fields_dict(cls: Type[AttrsInstance]) -> Dict[str, Attribute[Any]]: ... +def validate(inst: AttrsInstance) -> None: ... +def resolve_types( + cls: _C, + globalns: Optional[Dict[str, Any]] = ..., + localns: Optional[Dict[str, Any]] = ..., + attribs: Optional[List[Attribute[Any]]] = ..., +) -> _C: ... 
+ +# TODO: add support for returning a proper attrs class from the mypy plugin +# we use Any instead of _CountingAttr so that e.g. `make_class('Foo', +# [attr.ib()])` is valid +def make_class( + name: str, + attrs: Union[List[str], Tuple[str, ...], Dict[str, Any]], + bases: Tuple[type, ...] = ..., + repr_ns: Optional[str] = ..., + repr: bool = ..., + cmp: Optional[_EqOrderType] = ..., + hash: Optional[bool] = ..., + init: bool = ..., + slots: bool = ..., + frozen: bool = ..., + weakref_slot: bool = ..., + str: bool = ..., + auto_attribs: bool = ..., + kw_only: bool = ..., + cache_hash: bool = ..., + auto_exc: bool = ..., + eq: Optional[_EqOrderType] = ..., + order: Optional[_EqOrderType] = ..., + collect_by_mro: bool = ..., + on_setattr: Optional[_OnSetAttrArgType] = ..., + field_transformer: Optional[_FieldTransformer] = ..., +) -> type: ... + +# _funcs -- + +# TODO: add support for returning TypedDict from the mypy plugin +# FIXME: asdict/astuple do not honor their factory args. Waiting on one of +# these: +# https://github.com/python/mypy/issues/4236 +# https://github.com/python/typing/issues/253 +# XXX: remember to fix attrs.asdict/astuple too! +def asdict( + inst: AttrsInstance, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + dict_factory: Type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., + value_serializer: Optional[ + Callable[[type, Attribute[Any], Any], Any] + ] = ..., + tuple_keys: Optional[bool] = ..., +) -> Dict[str, Any]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: AttrsInstance, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + tuple_factory: Type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> Tuple[Any, ...]: ... +def has(cls: type) -> TypeGuard[Type[AttrsInstance]]: ... +def assoc(inst: _T, **changes: Any) -> _T: ... +def evolve(inst: _T, **changes: Any) -> _T: ... 
+ +# _config -- + +def set_run_validators(run: bool) -> None: ... +def get_run_validators() -> bool: ... + +# aliases -- + +s = attributes = attrs +ib = attr = attrib +dataclass = attrs # Technically, partial(attrs, auto_attribs=True) ;) diff --git a/pythonFiles/lib/jedilsp/attr/_cmp.py b/pythonFiles/lib/jedilsp/attr/_cmp.py new file mode 100644 index 0000000000000..ad1e18c75f1e5 --- /dev/null +++ b/pythonFiles/lib/jedilsp/attr/_cmp.py @@ -0,0 +1,155 @@ +# SPDX-License-Identifier: MIT + + +import functools +import types + +from ._make import _make_ne + + +_operation_names = {"eq": "==", "lt": "<", "le": "<=", "gt": ">", "ge": ">="} + + +def cmp_using( + eq=None, + lt=None, + le=None, + gt=None, + ge=None, + require_same_type=True, + class_name="Comparable", +): + """ + Create a class that can be passed into `attr.ib`'s ``eq``, ``order``, and + ``cmp`` arguments to customize field comparison. + + The resulting class will have a full set of ordering methods if + at least one of ``{lt, le, gt, ge}`` and ``eq`` are provided. + + :param Optional[callable] eq: `callable` used to evaluate equality + of two objects. + :param Optional[callable] lt: `callable` used to evaluate whether + one object is less than another object. + :param Optional[callable] le: `callable` used to evaluate whether + one object is less than or equal to another object. + :param Optional[callable] gt: `callable` used to evaluate whether + one object is greater than another object. + :param Optional[callable] ge: `callable` used to evaluate whether + one object is greater than or equal to another object. + + :param bool require_same_type: When `True`, equality and ordering methods + will return `NotImplemented` if objects are not of the same type. + + :param Optional[str] class_name: Name of class. Defaults to 'Comparable'. + + See `comparison` for more details. + + .. 
versionadded:: 21.1.0 + """ + + body = { + "__slots__": ["value"], + "__init__": _make_init(), + "_requirements": [], + "_is_comparable_to": _is_comparable_to, + } + + # Add operations. + num_order_functions = 0 + has_eq_function = False + + if eq is not None: + has_eq_function = True + body["__eq__"] = _make_operator("eq", eq) + body["__ne__"] = _make_ne() + + if lt is not None: + num_order_functions += 1 + body["__lt__"] = _make_operator("lt", lt) + + if le is not None: + num_order_functions += 1 + body["__le__"] = _make_operator("le", le) + + if gt is not None: + num_order_functions += 1 + body["__gt__"] = _make_operator("gt", gt) + + if ge is not None: + num_order_functions += 1 + body["__ge__"] = _make_operator("ge", ge) + + type_ = types.new_class( + class_name, (object,), {}, lambda ns: ns.update(body) + ) + + # Add same type requirement. + if require_same_type: + type_._requirements.append(_check_same_type) + + # Add total ordering if at least one operation was defined. + if 0 < num_order_functions < 4: + if not has_eq_function: + # functools.total_ordering requires __eq__ to be defined, + # so raise early error here to keep a nice stack. + raise ValueError( + "eq must be define is order to complete ordering from " + "lt, le, gt, ge." + ) + type_ = functools.total_ordering(type_) + + return type_ + + +def _make_init(): + """ + Create __init__ method. + """ + + def __init__(self, value): + """ + Initialize object with *value*. + """ + self.value = value + + return __init__ + + +def _make_operator(name, func): + """ + Create operator method. + """ + + def method(self, other): + if not self._is_comparable_to(other): + return NotImplemented + + result = func(self.value, other.value) + if result is NotImplemented: + return NotImplemented + + return result + + method.__name__ = f"__{name}__" + method.__doc__ = ( + f"Return a {_operation_names[name]} b. Computed by attrs." 
+ ) + + return method + + +def _is_comparable_to(self, other): + """ + Check whether `other` is comparable to `self`. + """ + for func in self._requirements: + if not func(self, other): + return False + return True + + +def _check_same_type(self, other): + """ + Return True if *self* and *other* are of the same type, False otherwise. + """ + return other.value.__class__ is self.value.__class__ diff --git a/pythonFiles/lib/jedilsp/attr/_cmp.pyi b/pythonFiles/lib/jedilsp/attr/_cmp.pyi new file mode 100644 index 0000000000000..f3dcdc1a75414 --- /dev/null +++ b/pythonFiles/lib/jedilsp/attr/_cmp.pyi @@ -0,0 +1,13 @@ +from typing import Any, Callable, Optional, Type + +_CompareWithType = Callable[[Any, Any], bool] + +def cmp_using( + eq: Optional[_CompareWithType] = ..., + lt: Optional[_CompareWithType] = ..., + le: Optional[_CompareWithType] = ..., + gt: Optional[_CompareWithType] = ..., + ge: Optional[_CompareWithType] = ..., + require_same_type: bool = ..., + class_name: str = ..., +) -> Type: ... diff --git a/pythonFiles/lib/jedilsp/attr/_compat.py b/pythonFiles/lib/jedilsp/attr/_compat.py new file mode 100644 index 0000000000000..35a85a3fa421f --- /dev/null +++ b/pythonFiles/lib/jedilsp/attr/_compat.py @@ -0,0 +1,176 @@ +# SPDX-License-Identifier: MIT + + +import inspect +import platform +import sys +import threading +import types +import warnings + +from collections.abc import Mapping, Sequence # noqa + + +PYPY = platform.python_implementation() == "PyPy" +PY310 = sys.version_info[:2] >= (3, 10) +PY_3_12_PLUS = sys.version_info[:2] >= (3, 12) + + +def just_warn(*args, **kw): + warnings.warn( + "Running interpreter doesn't sufficiently support code object " + "introspection. Some features like bare super() or accessing " + "__class__ will not work with slotted classes.", + RuntimeWarning, + stacklevel=2, + ) + + +class _AnnotationExtractor: + """ + Extract type annotations from a callable, returning None whenever there + is none. 
+ """ + + __slots__ = ["sig"] + + def __init__(self, callable): + try: + self.sig = inspect.signature(callable) + except (ValueError, TypeError): # inspect failed + self.sig = None + + def get_first_param_type(self): + """ + Return the type annotation of the first argument if it's not empty. + """ + if not self.sig: + return None + + params = list(self.sig.parameters.values()) + if params and params[0].annotation is not inspect.Parameter.empty: + return params[0].annotation + + return None + + def get_return_type(self): + """ + Return the return type if it's not empty. + """ + if ( + self.sig + and self.sig.return_annotation is not inspect.Signature.empty + ): + return self.sig.return_annotation + + return None + + +def make_set_closure_cell(): + """Return a function of two arguments (cell, value) which sets + the value stored in the closure cell `cell` to `value`. + """ + # pypy makes this easy. (It also supports the logic below, but + # why not do the easy/fast thing?) + if PYPY: + + def set_closure_cell(cell, value): + cell.__setstate__((value,)) + + return set_closure_cell + + # Otherwise gotta do it the hard way. + + # Create a function that will set its first cellvar to `value`. + def set_first_cellvar_to(value): + x = value + return + + # This function will be eliminated as dead code, but + # not before its reference to `x` forces `x` to be + # represented as a closure cell rather than a local. + def force_x_to_be_a_cell(): # pragma: no cover + return x + + try: + # Extract the code object and make sure our assumptions about + # the closure behavior are correct. + co = set_first_cellvar_to.__code__ + if co.co_cellvars != ("x",) or co.co_freevars != (): + raise AssertionError # pragma: no cover + + # Convert this code object to a code object that sets the + # function's first _freevar_ (not cellvar) to the argument. 
+ if sys.version_info >= (3, 8): + + def set_closure_cell(cell, value): + cell.cell_contents = value + + else: + args = [co.co_argcount] + args.append(co.co_kwonlyargcount) + args.extend( + [ + co.co_nlocals, + co.co_stacksize, + co.co_flags, + co.co_code, + co.co_consts, + co.co_names, + co.co_varnames, + co.co_filename, + co.co_name, + co.co_firstlineno, + co.co_lnotab, + # These two arguments are reversed: + co.co_cellvars, + co.co_freevars, + ] + ) + set_first_freevar_code = types.CodeType(*args) + + def set_closure_cell(cell, value): + # Create a function using the set_first_freevar_code, + # whose first closure cell is `cell`. Calling it will + # change the value of that cell. + setter = types.FunctionType( + set_first_freevar_code, {}, "setter", (), (cell,) + ) + # And call it to set the cell. + setter(value) + + # Make sure it works on this interpreter: + def make_func_with_cell(): + x = None + + def func(): + return x # pragma: no cover + + return func + + cell = make_func_with_cell().__closure__[0] + set_closure_cell(cell, 100) + if cell.cell_contents != 100: + raise AssertionError # pragma: no cover + + except Exception: + return just_warn + else: + return set_closure_cell + + +set_closure_cell = make_set_closure_cell() + +# Thread-local global to track attrs instances which are already being repr'd. +# This is needed because there is no other (thread-safe) way to pass info +# about the instances that are already being repr'd through the call stack +# in order to ensure we don't perform infinite recursion. +# +# For instance, if an instance contains a dict which contains that instance, +# we need to know that we're already repr'ing the outside instance from within +# the dict's repr() call. +# +# This lives here rather than in _make.py so that the functions in _make.py +# don't have a direct reference to the thread-local in their globals dict. +# If they have such a reference, it breaks cloudpickle. 
+repr_context = threading.local() diff --git a/pythonFiles/lib/jedilsp/attr/_config.py b/pythonFiles/lib/jedilsp/attr/_config.py new file mode 100644 index 0000000000000..96d4200773d85 --- /dev/null +++ b/pythonFiles/lib/jedilsp/attr/_config.py @@ -0,0 +1,31 @@ +# SPDX-License-Identifier: MIT + + +__all__ = ["set_run_validators", "get_run_validators"] + +_run_validators = True + + +def set_run_validators(run): + """ + Set whether or not validators are run. By default, they are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.set_disabled()` + instead. + """ + if not isinstance(run, bool): + raise TypeError("'run' must be bool.") + global _run_validators + _run_validators = run + + +def get_run_validators(): + """ + Return whether or not validators are run. + + .. deprecated:: 21.3.0 It will not be removed, but it also will not be + moved to new ``attrs`` namespace. Use `attrs.validators.get_disabled()` + instead. + """ + return _run_validators diff --git a/pythonFiles/lib/jedilsp/attr/_funcs.py b/pythonFiles/lib/jedilsp/attr/_funcs.py new file mode 100644 index 0000000000000..1f573c110a44a --- /dev/null +++ b/pythonFiles/lib/jedilsp/attr/_funcs.py @@ -0,0 +1,418 @@ +# SPDX-License-Identifier: MIT + + +import copy + +from ._make import NOTHING, _obj_setattr, fields +from .exceptions import AttrsAttributeNotFoundError + + +def asdict( + inst, + recurse=True, + filter=None, + dict_factory=dict, + retain_collection_types=False, + value_serializer=None, +): + """ + Return the ``attrs`` attribute values of *inst* as a dict. + + Optionally recurse into other ``attrs``-decorated classes. + + :param inst: Instance of an ``attrs``-decorated class. + :param bool recurse: Recurse into classes that are also + ``attrs``-decorated. + :param callable filter: A callable whose return code determines whether an + attribute or element is included (``True``) or dropped (``False``). 
Is + called with the `attrs.Attribute` as the first argument and the + value as the second argument. + :param callable dict_factory: A callable to produce dictionaries from. For + example, to produce ordered dictionaries instead of normal Python + dictionaries, pass in ``collections.OrderedDict``. + :param bool retain_collection_types: Do not convert to ``list`` when + encountering an attribute whose type is ``tuple`` or ``set``. Only + meaningful if ``recurse`` is ``True``. + :param Optional[callable] value_serializer: A hook that is called for every + attribute or dict key/value. It receives the current instance, field + and value and must return the (updated) value. The hook is run *after* + the optional *filter* has been applied. + + :rtype: return type of *dict_factory* + + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 16.0.0 *dict_factory* + .. versionadded:: 16.1.0 *retain_collection_types* + .. versionadded:: 20.3.0 *value_serializer* + .. versionadded:: 21.3.0 If a dict has a collection for a key, it is + serialized as a tuple. 
+ """ + attrs = fields(inst.__class__) + rv = dict_factory() + for a in attrs: + v = getattr(inst, a.name) + if filter is not None and not filter(a, v): + continue + + if value_serializer is not None: + v = value_serializer(inst, a, v) + + if recurse is True: + if has(v.__class__): + rv[a.name] = asdict( + v, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + elif isinstance(v, (tuple, list, set, frozenset)): + cf = v.__class__ if retain_collection_types is True else list + rv[a.name] = cf( + [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in v + ] + ) + elif isinstance(v, dict): + df = dict_factory + rv[a.name] = df( + ( + _asdict_anything( + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + _asdict_anything( + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + ) + for kk, vv in v.items() + ) + else: + rv[a.name] = v + else: + rv[a.name] = v + return rv + + +def _asdict_anything( + val, + is_key, + filter, + dict_factory, + retain_collection_types, + value_serializer, +): + """ + ``asdict`` only works on attrs instances, this works on anything. + """ + if getattr(val.__class__, "__attrs_attrs__", None) is not None: + # Attrs class. 
+ rv = asdict( + val, + recurse=True, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + elif isinstance(val, (tuple, list, set, frozenset)): + if retain_collection_types is True: + cf = val.__class__ + elif is_key: + cf = tuple + else: + cf = list + + rv = cf( + [ + _asdict_anything( + i, + is_key=False, + filter=filter, + dict_factory=dict_factory, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ) + for i in val + ] + ) + elif isinstance(val, dict): + df = dict_factory + rv = df( + ( + _asdict_anything( + kk, + is_key=True, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + _asdict_anything( + vv, + is_key=False, + filter=filter, + dict_factory=df, + retain_collection_types=retain_collection_types, + value_serializer=value_serializer, + ), + ) + for kk, vv in val.items() + ) + else: + rv = val + if value_serializer is not None: + rv = value_serializer(None, None, rv) + + return rv + + +def astuple( + inst, + recurse=True, + filter=None, + tuple_factory=tuple, + retain_collection_types=False, +): + """ + Return the ``attrs`` attribute values of *inst* as a tuple. + + Optionally recurse into other ``attrs``-decorated classes. + + :param inst: Instance of an ``attrs``-decorated class. + :param bool recurse: Recurse into classes that are also + ``attrs``-decorated. + :param callable filter: A callable whose return code determines whether an + attribute or element is included (``True``) or dropped (``False``). Is + called with the `attrs.Attribute` as the first argument and the + value as the second argument. + :param callable tuple_factory: A callable to produce tuples from. For + example, to produce lists instead of tuples. 
+ :param bool retain_collection_types: Do not convert to ``list`` + or ``dict`` when encountering an attribute which type is + ``tuple``, ``dict`` or ``set``. Only meaningful if ``recurse`` is + ``True``. + + :rtype: return type of *tuple_factory* + + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 16.2.0 + """ + attrs = fields(inst.__class__) + rv = [] + retain = retain_collection_types # Very long. :/ + for a in attrs: + v = getattr(inst, a.name) + if filter is not None and not filter(a, v): + continue + if recurse is True: + if has(v.__class__): + rv.append( + astuple( + v, + recurse=True, + filter=filter, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + ) + elif isinstance(v, (tuple, list, set, frozenset)): + cf = v.__class__ if retain is True else list + rv.append( + cf( + [ + astuple( + j, + recurse=True, + filter=filter, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(j.__class__) + else j + for j in v + ] + ) + ) + elif isinstance(v, dict): + df = v.__class__ if retain is True else dict + rv.append( + df( + ( + astuple( + kk, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(kk.__class__) + else kk, + astuple( + vv, + tuple_factory=tuple_factory, + retain_collection_types=retain, + ) + if has(vv.__class__) + else vv, + ) + for kk, vv in v.items() + ) + ) + else: + rv.append(v) + else: + rv.append(v) + + return rv if tuple_factory is list else tuple_factory(rv) + + +def has(cls): + """ + Check whether *cls* is a class with ``attrs`` attributes. + + :param type cls: Class to introspect. + :raise TypeError: If *cls* is not a class. + + :rtype: bool + """ + return getattr(cls, "__attrs_attrs__", None) is not None + + +def assoc(inst, **changes): + """ + Copy *inst* and apply *changes*. + + :param inst: Instance of a class with ``attrs`` attributes. + :param changes: Keyword changes in the new copy. 
+ + :return: A copy of inst with *changes* incorporated. + + :raise attr.exceptions.AttrsAttributeNotFoundError: If *attr_name* couldn't + be found on *cls*. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. deprecated:: 17.1.0 + Use `attrs.evolve` instead if you can. + This function will not be removed du to the slightly different approach + compared to `attrs.evolve`. + """ + import warnings + + warnings.warn( + "assoc is deprecated and will be removed after 2018/01.", + DeprecationWarning, + stacklevel=2, + ) + new = copy.copy(inst) + attrs = fields(inst.__class__) + for k, v in changes.items(): + a = getattr(attrs, k, NOTHING) + if a is NOTHING: + raise AttrsAttributeNotFoundError( + f"{k} is not an attrs attribute on {new.__class__}." + ) + _obj_setattr(new, k, v) + return new + + +def evolve(inst, **changes): + """ + Create a new instance, based on *inst* with *changes* applied. + + :param inst: Instance of a class with ``attrs`` attributes. + :param changes: Keyword changes in the new copy. + + :return: A copy of inst with *changes* incorporated. + + :raise TypeError: If *attr_name* couldn't be found in the class + ``__init__``. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class. + + .. versionadded:: 17.1.0 + """ + cls = inst.__class__ + attrs = fields(cls) + for a in attrs: + if not a.init: + continue + attr_name = a.name # To deal with private attributes. + init_name = a.alias + if init_name not in changes: + changes[init_name] = getattr(inst, attr_name) + + return cls(**changes) + + +def resolve_types(cls, globalns=None, localns=None, attribs=None): + """ + Resolve any strings and forward annotations in type annotations. + + This is only required if you need concrete types in `Attribute`'s *type* + field. In other words, you don't need to resolve your types if you only + use them for static type checking. 
+ + With no arguments, names will be looked up in the module in which the class + was created. If this is not what you want, e.g. if the name only exists + inside a method, you may pass *globalns* or *localns* to specify other + dictionaries in which to look up these names. See the docs of + `typing.get_type_hints` for more details. + + :param type cls: Class to resolve. + :param Optional[dict] globalns: Dictionary containing global variables. + :param Optional[dict] localns: Dictionary containing local variables. + :param Optional[list] attribs: List of attribs for the given class. + This is necessary when calling from inside a ``field_transformer`` + since *cls* is not an ``attrs`` class yet. + + :raise TypeError: If *cls* is not a class. + :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs`` + class and you didn't pass any attribs. + :raise NameError: If types cannot be resolved because of missing variables. + + :returns: *cls* so you can use this function also as a class decorator. + Please note that you have to apply it **after** `attrs.define`. That + means the decorator has to come in the line **before** `attrs.define`. + + .. versionadded:: 20.1.0 + .. versionadded:: 21.1.0 *attribs* + + """ + # Since calling get_type_hints is expensive we cache whether we've + # done it already. + if getattr(cls, "__attrs_types_resolved__", None) != cls: + import typing + + hints = typing.get_type_hints(cls, globalns=globalns, localns=localns) + for field in fields(cls) if attribs is None else attribs: + if field.name in hints: + # Since fields have been frozen we must work around it. + _obj_setattr(field, "type", hints[field.name]) + # We store the class we resolved so that subclasses know they haven't + # been resolved. + cls.__attrs_types_resolved__ = cls + + # Return the class so you can use it as a decorator too. 
+ return cls diff --git a/pythonFiles/lib/jedilsp/attr/_make.py b/pythonFiles/lib/jedilsp/attr/_make.py new file mode 100644 index 0000000000000..9ee22005ba40b --- /dev/null +++ b/pythonFiles/lib/jedilsp/attr/_make.py @@ -0,0 +1,2965 @@ +# SPDX-License-Identifier: MIT + +import copy +import enum +import linecache +import sys +import types +import typing + +from operator import itemgetter + +# We need to import _compat itself in addition to the _compat members to avoid +# having the thread-local in the globals here. +from . import _compat, _config, setters +from ._compat import PY310, PYPY, _AnnotationExtractor, set_closure_cell +from .exceptions import ( + DefaultAlreadySetError, + FrozenInstanceError, + NotAnAttrsClassError, + UnannotatedAttributeError, +) + + +# This is used at least twice, so cache it here. +_obj_setattr = object.__setattr__ +_init_converter_pat = "__attr_converter_%s" +_init_factory_pat = "__attr_factory_%s" +_classvar_prefixes = ( + "typing.ClassVar", + "t.ClassVar", + "ClassVar", + "typing_extensions.ClassVar", +) +# we don't use a double-underscore prefix because that triggers +# name mangling when trying to create a slot for the field +# (when slots=True) +_hash_cache_field = "_attrs_cached_hash" + +_empty_metadata_singleton = types.MappingProxyType({}) + +# Unique object for unequivocal getattr() defaults. +_sentinel = object() + +_ng_default_on_setattr = setters.pipe(setters.convert, setters.validate) + + +class _Nothing(enum.Enum): + """ + Sentinel to indicate the lack of a value when ``None`` is ambiguous. + + If extending attrs, you can use ``typing.Literal[NOTHING]`` to show + that a value may be ``NOTHING``. + + .. versionchanged:: 21.1.0 ``bool(NOTHING)`` is now False. + .. versionchanged:: 22.2.0 ``NOTHING`` is now an ``enum.Enum`` variant. 
+ """ + + NOTHING = enum.auto() + + def __repr__(self): + return "NOTHING" + + def __bool__(self): + return False + + +NOTHING = _Nothing.NOTHING +""" +Sentinel to indicate the lack of a value when ``None`` is ambiguous. +""" + + +class _CacheHashWrapper(int): + """ + An integer subclass that pickles / copies as None + + This is used for non-slots classes with ``cache_hash=True``, to avoid + serializing a potentially (even likely) invalid hash value. Since ``None`` + is the default value for uncalculated hashes, whenever this is copied, + the copy's value for the hash should automatically reset. + + See GH #613 for more details. + """ + + def __reduce__(self, _none_constructor=type(None), _args=()): + return _none_constructor, _args + + +def attrib( + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=None, + init=True, + metadata=None, + type=None, + converter=None, + factory=None, + kw_only=False, + eq=None, + order=None, + on_setattr=None, + alias=None, +): + """ + Create a new attribute on a class. + + .. warning:: + + Does *not* do anything unless the class is also decorated with + `attr.s`! + + :param default: A value that is used if an ``attrs``-generated ``__init__`` + is used and no value is passed while instantiating or the attribute is + excluded using ``init=False``. + + If the value is an instance of `attrs.Factory`, its callable will be + used to construct a new value (useful for mutable data types like lists + or dicts). + + If a default is not set (or set manually to `attrs.NOTHING`), a value + *must* be supplied when instantiating; otherwise a `TypeError` + will be raised. + + The default can also be set using decorator notation as shown below. + + :type default: Any value + + :param callable factory: Syntactic sugar for + ``default=attr.Factory(factory)``. + + :param validator: `callable` that is called by ``attrs``-generated + ``__init__`` methods after the instance has been initialized. 
They + receive the initialized instance, the :func:`~attrs.Attribute`, and the + passed value. + + The return value is *not* inspected so the validator has to throw an + exception itself. + + If a `list` is passed, its items are treated as validators and must + all pass. + + Validators can be globally disabled and re-enabled using + `get_run_validators`. + + The validator can also be set using decorator notation as shown below. + + :type validator: `callable` or a `list` of `callable`\\ s. + + :param repr: Include this attribute in the generated ``__repr__`` + method. If ``True``, include the attribute; if ``False``, omit it. By + default, the built-in ``repr()`` function is used. To override how the + attribute value is formatted, pass a ``callable`` that takes a single + value and returns a string. Note that the resulting string is used + as-is, i.e. it will be used directly *instead* of calling ``repr()`` + (the default). + :type repr: a `bool` or a `callable` to use a custom function. + + :param eq: If ``True`` (default), include this attribute in the + generated ``__eq__`` and ``__ne__`` methods that check two instances + for equality. To override how the attribute value is compared, + pass a ``callable`` that takes a single value and returns the value + to be compared. + :type eq: a `bool` or a `callable`. + + :param order: If ``True`` (default), include this attributes in the + generated ``__lt__``, ``__le__``, ``__gt__`` and ``__ge__`` methods. + To override how the attribute value is ordered, + pass a ``callable`` that takes a single value and returns the value + to be ordered. + :type order: a `bool` or a `callable`. + + :param cmp: Setting *cmp* is equivalent to setting *eq* and *order* to the + same value. Must not be mixed with *eq* or *order*. + :type cmp: a `bool` or a `callable`. + + :param Optional[bool] hash: Include this attribute in the generated + ``__hash__`` method. If ``None`` (default), mirror *eq*'s value. 
This + is the correct behavior according the Python spec. Setting this value + to anything else than ``None`` is *discouraged*. + :param bool init: Include this attribute in the generated ``__init__`` + method. It is possible to set this to ``False`` and set a default + value. In that case this attributed is unconditionally initialized + with the specified default value or factory. + :param callable converter: `callable` that is called by + ``attrs``-generated ``__init__`` methods to convert attribute's value + to the desired format. It is given the passed-in value, and the + returned value will be used as the new value of the attribute. The + value is converted before being passed to the validator, if any. + :param metadata: An arbitrary mapping, to be used by third-party + components. See `extending-metadata`. + + :param type: The type of the attribute. Nowadays, the preferred method to + specify the type is using a variable annotation (see :pep:`526`). + This argument is provided for backward compatibility. + Regardless of the approach used, the type will be stored on + ``Attribute.type``. + + Please note that ``attrs`` doesn't do anything with this metadata by + itself. You can use it as part of your own code or for + `static type checking `. + :param kw_only: Make this attribute keyword-only in the generated + ``__init__`` (if ``init`` is ``False``, this parameter is ignored). + :param on_setattr: Allows to overwrite the *on_setattr* setting from + `attr.s`. If left `None`, the *on_setattr* value from `attr.s` is used. + Set to `attrs.setters.NO_OP` to run **no** `setattr` hooks for this + attribute -- regardless of the setting in `attr.s`. + :type on_setattr: `callable`, or a list of callables, or `None`, or + `attrs.setters.NO_OP` + :param Optional[str] alias: Override this attribute's parameter name in the + generated ``__init__`` method. If left `None`, default to ``name`` + stripped of leading underscores. See `private-attributes`. + + .. 
versionadded:: 15.2.0 *convert* + .. versionadded:: 16.3.0 *metadata* + .. versionchanged:: 17.1.0 *validator* can be a ``list`` now. + .. versionchanged:: 17.1.0 + *hash* is ``None`` and therefore mirrors *eq* by default. + .. versionadded:: 17.3.0 *type* + .. deprecated:: 17.4.0 *convert* + .. versionadded:: 17.4.0 *converter* as a replacement for the deprecated + *convert* to achieve consistency with other noun-based arguments. + .. versionadded:: 18.1.0 + ``factory=f`` is syntactic sugar for ``default=attr.Factory(f)``. + .. versionadded:: 18.2.0 *kw_only* + .. versionchanged:: 19.2.0 *convert* keyword argument removed. + .. versionchanged:: 19.2.0 *repr* also accepts a custom callable. + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.3.0 *kw_only* backported to Python 2 + .. versionchanged:: 21.1.0 + *eq*, *order*, and *cmp* also accept a custom callable + .. versionchanged:: 21.1.0 *cmp* undeprecated + .. versionadded:: 22.2.0 *alias* + """ + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq, order, True + ) + + if hash is not None and hash is not True and hash is not False: + raise TypeError( + "Invalid value for hash. Must be True, False, or None." + ) + + if factory is not None: + if default is not NOTHING: + raise ValueError( + "The `default` and `factory` arguments are mutually " + "exclusive." + ) + if not callable(factory): + raise ValueError("The `factory` argument must be a callable.") + default = Factory(factory) + + if metadata is None: + metadata = {} + + # Apply syntactic sugar by auto-wrapping. 
+ if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + if validator and isinstance(validator, (list, tuple)): + validator = and_(*validator) + + if converter and isinstance(converter, (list, tuple)): + converter = pipe(*converter) + + return _CountingAttr( + default=default, + validator=validator, + repr=repr, + cmp=None, + hash=hash, + init=init, + converter=converter, + metadata=metadata, + type=type, + kw_only=kw_only, + eq=eq, + eq_key=eq_key, + order=order, + order_key=order_key, + on_setattr=on_setattr, + alias=alias, + ) + + +def _compile_and_eval(script, globs, locs=None, filename=""): + """ + "Exec" the script with the given global (globs) and local (locs) variables. + """ + bytecode = compile(script, filename, "exec") + eval(bytecode, globs, locs) + + +def _make_method(name, script, filename, globs): + """ + Create the method with the script given and return the method object. + """ + locs = {} + + # In order of debuggers like PDB being able to step through the code, + # we add a fake linecache entry. + count = 1 + base_filename = filename + while True: + linecache_tuple = ( + len(script), + None, + script.splitlines(True), + filename, + ) + old_val = linecache.cache.setdefault(filename, linecache_tuple) + if old_val == linecache_tuple: + break + else: + filename = f"{base_filename[:-1]}-{count}>" + count += 1 + + _compile_and_eval(script, globs, locs, filename) + + return locs[name] + + +def _make_attr_tuple_class(cls_name, attr_names): + """ + Create a tuple subclass to hold `Attribute`s for an `attrs` class. + + The subclass is a bare tuple with properties for names. 
def _make_attr_tuple_class(cls_name, attr_names):
    """
    Create a tuple subclass to hold `Attribute`s for an `attrs` class.

    The subclass is a bare tuple with properties for names.

    class MyClassAttributes(tuple):
        __slots__ = ()
        x = property(itemgetter(0))
    """
    attr_class_name = f"{cls_name}Attributes"
    attr_class_template = [
        f"class {attr_class_name}(tuple):",
        " __slots__ = ()",
    ]
    if attr_names:
        attr_class_template.extend(
            f" {attr_name} = _attrs_property(_attrs_itemgetter({i}))"
            for i, attr_name in enumerate(attr_names)
        )
    else:
        # A class body cannot be empty; give it a no-op statement.
        attr_class_template.append(" pass")
    globs = {"_attrs_itemgetter": itemgetter, "_attrs_property": property}
    _compile_and_eval("\n".join(attr_class_template), globs)
    return globs[attr_class_name]


# Tuple class for extracted attributes from a class definition.
# `base_attrs` is a subset of `attrs`.
_Attributes = _make_attr_tuple_class(
    "_Attributes",
    [
        # all attributes to build dunder methods for
        "attrs",
        # attributes that have been inherited
        "base_attrs",
        # map inherited attributes to their originating classes
        "base_attrs_map",
    ],
)


def _is_class_var(annot):
    """
    Check whether *annot* is a typing.ClassVar.

    The string comparison hack is used to avoid evaluating all string
    annotations which would put attrs-based classes at a performance
    disadvantage compared to plain old classes.
    """
    text = str(annot)

    # Annotation can be quoted (a forward reference); strip one layer.
    if text.startswith(("'", '"')) and text.endswith(("'", '"')):
        text = text[1:-1]

    return text.startswith(_classvar_prefixes)


def _has_own_attribute(cls, attrib_name):
    """
    Check whether *cls* defines *attrib_name* (and doesn't just inherit it).
    """
    value = getattr(cls, attrib_name, _sentinel)
    if value is _sentinel:
        return False

    # The attribute counts as "own" only if no base class in the MRO
    # exposes the very same object.
    return all(
        value is not getattr(base_cls, attrib_name, None)
        for base_cls in cls.__mro__[1:]
    )
+ """ + if _has_own_attribute(cls, "__annotations__"): + return cls.__annotations__ + + return {} + + +def _collect_base_attrs(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. + for base_cls in reversed(cls.__mro__[1:-1]): + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.inherited or a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + # For each name, only keep the freshest definition i.e. the furthest at the + # back. base_attr_map is fine because it gets overwritten with every new + # instance. + filtered = [] + seen = set() + for a in reversed(base_attrs): + if a.name in seen: + continue + filtered.insert(0, a) + seen.add(a.name) + + return filtered, base_attr_map + + +def _collect_base_attrs_broken(cls, taken_attr_names): + """ + Collect attr.ibs from base classes of *cls*, except *taken_attr_names*. + + N.B. *taken_attr_names* will be mutated. + + Adhere to the old incorrect behavior. + + Notably it collects from the front and considers inherited attributes which + leads to the buggy behavior reported in #428. + """ + base_attrs = [] + base_attr_map = {} # A dictionary of base attrs to their classes. + + # Traverse the MRO and collect attributes. + for base_cls in cls.__mro__[1:-1]: + for a in getattr(base_cls, "__attrs_attrs__", []): + if a.name in taken_attr_names: + continue + + a = a.evolve(inherited=True) + taken_attr_names.add(a.name) + base_attrs.append(a) + base_attr_map[a.name] = base_cls + + return base_attrs, base_attr_map + + +def _transform_attrs( + cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer +): + """ + Transform all `_CountingAttr`s on a class into `Attribute`s. + + If *these* is passed, use that and don't look for them on the class. 
def _transform_attrs(
    cls, these, auto_attribs, kw_only, collect_by_mro, field_transformer
):
    """
    Transform all `_CountingAttr`s on a class into `Attribute`s.

    If *these* is passed, use that and don't look for them on the class.

    If *collect_by_mro* is True, collect them in the correct MRO order,
    otherwise use the old -- incorrect -- order.  See #428.

    Return an `_Attributes`.
    """
    cd = cls.__dict__
    anns = _get_annotations(cls)

    if these is not None:
        # Caller supplied the attribute dict explicitly; trust its order.
        ca_list = [(name, ca) for name, ca in these.items()]
    elif auto_attribs is True:
        # PEP 526 mode: every field must carry a type annotation.
        ca_names = {
            name
            for name, attr in cd.items()
            if isinstance(attr, _CountingAttr)
        }
        ca_list = []
        annot_names = set()
        for attr_name, type in anns.items():
            if _is_class_var(type):
                continue
            annot_names.add(attr_name)
            a = cd.get(attr_name, NOTHING)

            if not isinstance(a, _CountingAttr):
                # A plain assigned value becomes the default; no value at
                # all becomes a bare attrib().
                if a is NOTHING:
                    a = attrib()
                else:
                    a = attrib(default=a)
            ca_list.append((attr_name, a))

        # Any attr.ib() that lacked an annotation is a user error here.
        unannotated = ca_names - annot_names
        if len(unannotated) > 0:
            raise UnannotatedAttributeError(
                "The following `attr.ib`s lack a type annotation: "
                + ", ".join(
                    sorted(unannotated, key=lambda n: cd.get(n).counter)
                )
                + "."
            )
    else:
        # Classic mode: order is recovered from each attrib's creation
        # counter, since class dicts predate guaranteed ordering semantics.
        ca_list = sorted(
            (
                (name, attr)
                for name, attr in cd.items()
                if isinstance(attr, _CountingAttr)
            ),
            key=lambda e: e[1].counter,
        )

    own_attrs = [
        Attribute.from_counting_attr(
            name=attr_name, ca=ca, type=anns.get(attr_name)
        )
        for attr_name, ca in ca_list
    ]

    if collect_by_mro:
        base_attrs, base_attr_map = _collect_base_attrs(
            cls, {a.name for a in own_attrs}
        )
    else:
        base_attrs, base_attr_map = _collect_base_attrs_broken(
            cls, {a.name for a in own_attrs}
        )

    if kw_only:
        own_attrs = [a.evolve(kw_only=True) for a in own_attrs]
        base_attrs = [a.evolve(kw_only=True) for a in base_attrs]

    attrs = base_attrs + own_attrs

    # Mandatory vs non-mandatory attr order only matters when they are part of
    # the __init__ signature and when they aren't kw_only (which are moved to
    # the end and can be mandatory or non-mandatory in any order, as they will
    # be specified as keyword args anyway). Check the order of those attrs:
    had_default = False
    for a in (a for a in attrs if a.init is not False and a.kw_only is False):
        if had_default is True and a.default is NOTHING:
            raise ValueError(
                "No mandatory attributes allowed after an attribute with a "
                f"default value or factory. Attribute in question: {a!r}"
            )

        if had_default is False and a.default is not NOTHING:
            had_default = True

    if field_transformer is not None:
        attrs = field_transformer(cls, attrs)

    # Resolve default field alias after executing field_transformer.
    # This allows field_transformer to differentiate between explicit vs
    # default aliases and supply their own defaults.
    attrs = [
        a.evolve(alias=_default_init_alias_for(a.name)) if not a.alias else a
        for a in attrs
    ]

    # Create AttrsClass *after* applying the field_transformer since it may
    # add or remove attributes!
    attr_names = [a.name for a in attrs]
    AttrsClass = _make_attr_tuple_class(cls.__name__, attr_names)

    return _Attributes((AttrsClass(attrs), base_attrs, base_attr_map))


if PYPY:

    def _frozen_setattrs(self, name, value):
        """
        Attached to frozen classes as __setattr__.
        """
        # On PyPy, __cause__/__context__ must stay settable on frozen
        # exception classes (presumably assigned internally during
        # exception handling) -- TODO confirm against upstream attrs notes.
        if isinstance(self, BaseException) and name in (
            "__cause__",
            "__context__",
        ):
            BaseException.__setattr__(self, name, value)
            return

        raise FrozenInstanceError()

else:

    def _frozen_setattrs(self, name, value):
        """
        Attached to frozen classes as __setattr__.
        """
        raise FrozenInstanceError()


def _frozen_delattrs(self, name):
    """
    Attached to frozen classes as __delattr__.
    """
    raise FrozenInstanceError()
+ """ + + __slots__ = ( + "_attr_names", + "_attrs", + "_base_attr_map", + "_base_names", + "_cache_hash", + "_cls", + "_cls_dict", + "_delete_attribs", + "_frozen", + "_has_pre_init", + "_has_post_init", + "_is_exc", + "_on_setattr", + "_slots", + "_weakref_slot", + "_wrote_own_setattr", + "_has_custom_setattr", + ) + + def __init__( + self, + cls, + these, + slots, + frozen, + weakref_slot, + getstate_setstate, + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_custom_setattr, + field_transformer, + ): + attrs, base_attrs, base_map = _transform_attrs( + cls, + these, + auto_attribs, + kw_only, + collect_by_mro, + field_transformer, + ) + + self._cls = cls + self._cls_dict = dict(cls.__dict__) if slots else {} + self._attrs = attrs + self._base_names = {a.name for a in base_attrs} + self._base_attr_map = base_map + self._attr_names = tuple(a.name for a in attrs) + self._slots = slots + self._frozen = frozen + self._weakref_slot = weakref_slot + self._cache_hash = cache_hash + self._has_pre_init = bool(getattr(cls, "__attrs_pre_init__", False)) + self._has_post_init = bool(getattr(cls, "__attrs_post_init__", False)) + self._delete_attribs = not bool(these) + self._is_exc = is_exc + self._on_setattr = on_setattr + + self._has_custom_setattr = has_custom_setattr + self._wrote_own_setattr = False + + self._cls_dict["__attrs_attrs__"] = self._attrs + + if frozen: + self._cls_dict["__setattr__"] = _frozen_setattrs + self._cls_dict["__delattr__"] = _frozen_delattrs + + self._wrote_own_setattr = True + elif on_setattr in ( + _ng_default_on_setattr, + setters.validate, + setters.convert, + ): + has_validator = has_converter = False + for a in attrs: + if a.validator is not None: + has_validator = True + if a.converter is not None: + has_converter = True + + if has_validator and has_converter: + break + if ( + ( + on_setattr == _ng_default_on_setattr + and not (has_validator or has_converter) + ) + or (on_setattr == setters.validate and 
not has_validator) + or (on_setattr == setters.convert and not has_converter) + ): + # If class-level on_setattr is set to convert + validate, but + # there's no field to convert or validate, pretend like there's + # no on_setattr. + self._on_setattr = None + + if getstate_setstate: + ( + self._cls_dict["__getstate__"], + self._cls_dict["__setstate__"], + ) = self._make_getstate_setstate() + + def __repr__(self): + return f"<_ClassBuilder(cls={self._cls.__name__})>" + + if PY310: + import abc + + def build_class(self): + """ + Finalize class based on the accumulated configuration. + + Builder cannot be used after calling this method. + """ + if self._slots is True: + return self._create_slots_class() + + return self.abc.update_abstractmethods( + self._patch_original_class() + ) + + else: + + def build_class(self): + """ + Finalize class based on the accumulated configuration. + + Builder cannot be used after calling this method. + """ + if self._slots is True: + return self._create_slots_class() + + return self._patch_original_class() + + def _patch_original_class(self): + """ + Apply accumulated methods and return the class. + """ + cls = self._cls + base_names = self._base_names + + # Clean class of attribute definitions (`attr.ib()`s). + if self._delete_attribs: + for name in self._attr_names: + if ( + name not in base_names + and getattr(cls, name, _sentinel) is not _sentinel + ): + try: + delattr(cls, name) + except AttributeError: + # This can happen if a base class defines a class + # variable and we want to set an attribute with the + # same name by using only a type annotation. + pass + + # Attach our dunder methods. + for name, value in self._cls_dict.items(): + setattr(cls, name, value) + + # If we've inherited an attrs __setattr__ and don't write our own, + # reset it to object's. 
+ if not self._wrote_own_setattr and getattr( + cls, "__attrs_own_setattr__", False + ): + cls.__attrs_own_setattr__ = False + + if not self._has_custom_setattr: + cls.__setattr__ = _obj_setattr + + return cls + + def _create_slots_class(self): + """ + Build and return a new class with a `__slots__` attribute. + """ + cd = { + k: v + for k, v in self._cls_dict.items() + if k not in tuple(self._attr_names) + ("__dict__", "__weakref__") + } + + # If our class doesn't have its own implementation of __setattr__ + # (either from the user or by us), check the bases, if one of them has + # an attrs-made __setattr__, that needs to be reset. We don't walk the + # MRO because we only care about our immediate base classes. + # XXX: This can be confused by subclassing a slotted attrs class with + # XXX: a non-attrs class and subclass the resulting class with an attrs + # XXX: class. See `test_slotted_confused` for details. For now that's + # XXX: OK with us. + if not self._wrote_own_setattr: + cd["__attrs_own_setattr__"] = False + + if not self._has_custom_setattr: + for base_cls in self._cls.__bases__: + if base_cls.__dict__.get("__attrs_own_setattr__", False): + cd["__setattr__"] = _obj_setattr + break + + # Traverse the MRO to collect existing slots + # and check for an existing __weakref__. + existing_slots = dict() + weakref_inherited = False + for base_cls in self._cls.__mro__[1:-1]: + if base_cls.__dict__.get("__weakref__", None) is not None: + weakref_inherited = True + existing_slots.update( + { + name: getattr(base_cls, name) + for name in getattr(base_cls, "__slots__", []) + } + ) + + base_names = set(self._base_names) + + names = self._attr_names + if ( + self._weakref_slot + and "__weakref__" not in getattr(self._cls, "__slots__", ()) + and "__weakref__" not in names + and not weakref_inherited + ): + names += ("__weakref__",) + + # We only add the names of attributes that aren't inherited. + # Setting __slots__ to inherited attributes wastes memory. 
+ slot_names = [name for name in names if name not in base_names] + # There are slots for attributes from current class + # that are defined in parent classes. + # As their descriptors may be overridden by a child class, + # we collect them here and update the class dict + reused_slots = { + slot: slot_descriptor + for slot, slot_descriptor in existing_slots.items() + if slot in slot_names + } + slot_names = [name for name in slot_names if name not in reused_slots] + cd.update(reused_slots) + if self._cache_hash: + slot_names.append(_hash_cache_field) + cd["__slots__"] = tuple(slot_names) + + cd["__qualname__"] = self._cls.__qualname__ + + # Create new class based on old class and our methods. + cls = type(self._cls)(self._cls.__name__, self._cls.__bases__, cd) + + # The following is a fix for + # . + # If a method mentions `__class__` or uses the no-arg super(), the + # compiler will bake a reference to the class in the method itself + # as `method.__closure__`. Since we replace the class with a + # clone, we rewrite these references so it keeps working. + for item in cls.__dict__.values(): + if isinstance(item, (classmethod, staticmethod)): + # Class- and staticmethods hide their functions inside. + # These might need to be rewritten as well. + closure_cells = getattr(item.__func__, "__closure__", None) + elif isinstance(item, property): + # Workaround for property `super()` shortcut (PY3-only). + # There is no universal way for other descriptors. + closure_cells = getattr(item.fget, "__closure__", None) + else: + closure_cells = getattr(item, "__closure__", None) + + if not closure_cells: # Catch None or the empty list. 
+ continue + for cell in closure_cells: + try: + match = cell.cell_contents is self._cls + except ValueError: # ValueError: Cell is empty + pass + else: + if match: + set_closure_cell(cell, cls) + + return cls + + def add_repr(self, ns): + self._cls_dict["__repr__"] = self._add_method_dunders( + _make_repr(self._attrs, ns, self._cls) + ) + return self + + def add_str(self): + repr = self._cls_dict.get("__repr__") + if repr is None: + raise ValueError( + "__str__ can only be generated if a __repr__ exists." + ) + + def __str__(self): + return self.__repr__() + + self._cls_dict["__str__"] = self._add_method_dunders(__str__) + return self + + def _make_getstate_setstate(self): + """ + Create custom __setstate__ and __getstate__ methods. + """ + # __weakref__ is not writable. + state_attr_names = tuple( + an for an in self._attr_names if an != "__weakref__" + ) + + def slots_getstate(self): + """ + Automatically created by attrs. + """ + return {name: getattr(self, name) for name in state_attr_names} + + hash_caching_enabled = self._cache_hash + + def slots_setstate(self, state): + """ + Automatically created by attrs. + """ + __bound_setattr = _obj_setattr.__get__(self) + for name in state_attr_names: + if name in state: + __bound_setattr(name, state[name]) + + # The hash code cache is not included when the object is + # serialized, but it still needs to be initialized to None to + # indicate that the first call to __hash__ should be a cache + # miss. 
+ if hash_caching_enabled: + __bound_setattr(_hash_cache_field, None) + + return slots_getstate, slots_setstate + + def make_unhashable(self): + self._cls_dict["__hash__"] = None + return self + + def add_hash(self): + self._cls_dict["__hash__"] = self._add_method_dunders( + _make_hash( + self._cls, + self._attrs, + frozen=self._frozen, + cache_hash=self._cache_hash, + ) + ) + + return self + + def add_init(self): + self._cls_dict["__init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=False, + ) + ) + + return self + + def add_match_args(self): + self._cls_dict["__match_args__"] = tuple( + field.name + for field in self._attrs + if field.init and not field.kw_only + ) + + def add_attrs_init(self): + self._cls_dict["__attrs_init__"] = self._add_method_dunders( + _make_init( + self._cls, + self._attrs, + self._has_pre_init, + self._has_post_init, + self._frozen, + self._slots, + self._cache_hash, + self._base_attr_map, + self._is_exc, + self._on_setattr, + attrs_init=True, + ) + ) + + return self + + def add_eq(self): + cd = self._cls_dict + + cd["__eq__"] = self._add_method_dunders( + _make_eq(self._cls, self._attrs) + ) + cd["__ne__"] = self._add_method_dunders(_make_ne()) + + return self + + def add_order(self): + cd = self._cls_dict + + cd["__lt__"], cd["__le__"], cd["__gt__"], cd["__ge__"] = ( + self._add_method_dunders(meth) + for meth in _make_order(self._cls, self._attrs) + ) + + return self + + def add_setattr(self): + if self._frozen: + return self + + sa_attrs = {} + for a in self._attrs: + on_setattr = a.on_setattr or self._on_setattr + if on_setattr and on_setattr is not setters.NO_OP: + sa_attrs[a.name] = a, on_setattr + + if not sa_attrs: + return self + + if self._has_custom_setattr: + # We need to write a __setattr__ but there already is one! 
+ raise ValueError( + "Can't combine custom __setattr__ with on_setattr hooks." + ) + + # docstring comes from _add_method_dunders + def __setattr__(self, name, val): + try: + a, hook = sa_attrs[name] + except KeyError: + nval = val + else: + nval = hook(self, a, val) + + _obj_setattr(self, name, nval) + + self._cls_dict["__attrs_own_setattr__"] = True + self._cls_dict["__setattr__"] = self._add_method_dunders(__setattr__) + self._wrote_own_setattr = True + + return self + + def _add_method_dunders(self, method): + """ + Add __module__ and __qualname__ to a *method* if possible. + """ + try: + method.__module__ = self._cls.__module__ + except AttributeError: + pass + + try: + method.__qualname__ = ".".join( + (self._cls.__qualname__, method.__name__) + ) + except AttributeError: + pass + + try: + method.__doc__ = ( + "Method generated by attrs for class " + f"{self._cls.__qualname__}." + ) + except AttributeError: + pass + + return method + + +def _determine_attrs_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. + """ + if cmp is not None and any((eq is not None, order is not None)): + raise ValueError("Don't mix `cmp` with `eq' and `order`.") + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + return cmp, cmp + + # If left None, equality is set to the specified default and ordering + # mirrors equality. + if eq is None: + eq = default_eq + + if order is None: + order = eq + + if eq is False and order is True: + raise ValueError("`order` can only be True if `eq` is True too.") + + return eq, order + + +def _determine_attrib_eq_order(cmp, eq, order, default_eq): + """ + Validate the combination of *cmp*, *eq*, and *order*. Derive the effective + values of eq and order. If *eq* is None, set it to *default_eq*. 
+ """ + if cmp is not None and any((eq is not None, order is not None)): + raise ValueError("Don't mix `cmp` with `eq' and `order`.") + + def decide_callable_or_boolean(value): + """ + Decide whether a key function is used. + """ + if callable(value): + value, key = True, value + else: + key = None + return value, key + + # cmp takes precedence due to bw-compatibility. + if cmp is not None: + cmp, cmp_key = decide_callable_or_boolean(cmp) + return cmp, cmp_key, cmp, cmp_key + + # If left None, equality is set to the specified default and ordering + # mirrors equality. + if eq is None: + eq, eq_key = default_eq, None + else: + eq, eq_key = decide_callable_or_boolean(eq) + + if order is None: + order, order_key = eq, eq_key + else: + order, order_key = decide_callable_or_boolean(order) + + if eq is False and order is True: + raise ValueError("`order` can only be True if `eq` is True too.") + + return eq, eq_key, order, order_key + + +def _determine_whether_to_implement( + cls, flag, auto_detect, dunders, default=True +): + """ + Check whether we should implement a set of methods for *cls*. + + *flag* is the argument passed into @attr.s like 'init', *auto_detect* the + same as passed into @attr.s and *dunders* is a tuple of attribute names + whose presence signal that the user has implemented it themselves. + + Return *default* if no reason for either for or against is found. + """ + if flag is True or flag is False: + return flag + + if flag is None and auto_detect is False: + return default + + # Logically, flag is None and auto_detect is True here. 
+ for dunder in dunders: + if _has_own_attribute(cls, dunder): + return False + + return default + + +def attrs( + maybe_cls=None, + these=None, + repr_ns=None, + repr=None, + cmp=None, + hash=None, + init=None, + slots=False, + frozen=False, + weakref_slot=True, + str=False, + auto_attribs=False, + kw_only=False, + cache_hash=False, + auto_exc=False, + eq=None, + order=None, + auto_detect=False, + collect_by_mro=False, + getstate_setstate=None, + on_setattr=None, + field_transformer=None, + match_args=True, + unsafe_hash=None, +): + r""" + A class decorator that adds :term:`dunder methods` according to the + specified attributes using `attr.ib` or the *these* argument. + + :param these: A dictionary of name to `attr.ib` mappings. This is + useful to avoid the definition of your attributes within the class body + because you can't (e.g. if you want to add ``__repr__`` methods to + Django models) or don't want to. + + If *these* is not ``None``, ``attrs`` will *not* search the class body + for attributes and will *not* remove any attributes from it. + + The order is deduced from the order of the attributes inside *these*. + + :type these: `dict` of `str` to `attr.ib` + + :param str repr_ns: When using nested classes, there's no way in Python 2 + to automatically detect that. Therefore it's possible to set the + namespace explicitly for a more meaningful ``repr`` output. + :param bool auto_detect: Instead of setting the *init*, *repr*, *eq*, + *order*, and *hash* arguments explicitly, assume they are set to + ``True`` **unless any** of the involved methods for one of the + arguments is implemented in the *current* class (i.e. it is *not* + inherited from some base class). + + So for example by implementing ``__eq__`` on a class yourself, + ``attrs`` will deduce ``eq=False`` and will create *neither* + ``__eq__`` *nor* ``__ne__`` (but Python classes come with a sensible + ``__ne__`` by default, so it *should* be enough to only implement + ``__eq__`` in most cases). 
+ + .. warning:: + + If you prevent ``attrs`` from creating the ordering methods for you + (``order=False``, e.g. by implementing ``__le__``), it becomes + *your* responsibility to make sure its ordering is sound. The best + way is to use the `functools.total_ordering` decorator. + + + Passing ``True`` or ``False`` to *init*, *repr*, *eq*, *order*, + *cmp*, or *hash* overrides whatever *auto_detect* would determine. + + :param bool repr: Create a ``__repr__`` method with a human readable + representation of ``attrs`` attributes.. + :param bool str: Create a ``__str__`` method that is identical to + ``__repr__``. This is usually not necessary except for + `Exception`\ s. + :param Optional[bool] eq: If ``True`` or ``None`` (default), add ``__eq__`` + and ``__ne__`` methods that check two instances for equality. + + They compare the instances as if they were tuples of their ``attrs`` + attributes if and only if the types of both classes are *identical*! + :param Optional[bool] order: If ``True``, add ``__lt__``, ``__le__``, + ``__gt__``, and ``__ge__`` methods that behave like *eq* above and + allow instances to be ordered. If ``None`` (default) mirror value of + *eq*. + :param Optional[bool] cmp: Setting *cmp* is equivalent to setting *eq* + and *order* to the same value. Must not be mixed with *eq* or *order*. + :param Optional[bool] unsafe_hash: If ``None`` (default), the ``__hash__`` + method is generated according how *eq* and *frozen* are set. + + 1. If *both* are True, ``attrs`` will generate a ``__hash__`` for you. + 2. If *eq* is True and *frozen* is False, ``__hash__`` will be set to + None, marking it unhashable (which it is). + 3. If *eq* is False, ``__hash__`` will be left untouched meaning the + ``__hash__`` method of the base class will be used (if base class is + ``object``, this means it will fall back to id-based hashing.). + + Although not recommended, you can decide for yourself and force + ``attrs`` to create one (e.g. 
if the class is immutable even though you + didn't freeze it programmatically) by passing ``True`` or not. Both of + these cases are rather special and should be used carefully. + + See our documentation on `hashing`, Python's documentation on + `object.__hash__`, and the `GitHub issue that led to the default \ + behavior `_ for more + details. + :param Optional[bool] hash: Alias for *unsafe_hash*. *unsafe_hash* takes + precedence. + :param bool init: Create a ``__init__`` method that initializes the + ``attrs`` attributes. Leading underscores are stripped for the argument + name. If a ``__attrs_pre_init__`` method exists on the class, it will + be called before the class is initialized. If a ``__attrs_post_init__`` + method exists on the class, it will be called after the class is fully + initialized. + + If ``init`` is ``False``, an ``__attrs_init__`` method will be + injected instead. This allows you to define a custom ``__init__`` + method that can do pre-init work such as ``super().__init__()``, + and then call ``__attrs_init__()`` and ``__attrs_post_init__()``. + :param bool slots: Create a :term:`slotted class ` that's + more memory-efficient. Slotted classes are generally superior to the + default dict classes, but have some gotchas you should know about, so + we encourage you to read the :term:`glossary entry `. + :param bool frozen: Make instances immutable after initialization. If + someone attempts to modify a frozen instance, + `attr.exceptions.FrozenInstanceError` is raised. + + .. note:: + + 1. This is achieved by installing a custom ``__setattr__`` method + on your class, so you can't implement your own. + + 2. True immutability is impossible in Python. + + 3. This *does* have a minor a runtime performance `impact + ` when initializing new instances. In other words: + ``__init__`` is slightly slower with ``frozen=True``. + + 4. If a class is frozen, you cannot modify ``self`` in + ``__attrs_post_init__`` or a self-written ``__init__``. 
You can + circumvent that limitation by using + ``object.__setattr__(self, "attribute_name", value)``. + + 5. Subclasses of a frozen class are frozen too. + + :param bool weakref_slot: Make instances weak-referenceable. This has no + effect unless ``slots`` is also enabled. + :param bool auto_attribs: If ``True``, collect :pep:`526`-annotated + attributes from the class body. + + In this case, you **must** annotate every field. If ``attrs`` + encounters a field that is set to an `attr.ib` but lacks a type + annotation, an `attr.exceptions.UnannotatedAttributeError` is + raised. Use ``field_name: typing.Any = attr.ib(...)`` if you don't + want to set a type. + + If you assign a value to those attributes (e.g. ``x: int = 42``), that + value becomes the default value like if it were passed using + ``attr.ib(default=42)``. Passing an instance of `attrs.Factory` also + works as expected in most cases (see warning below). + + Attributes annotated as `typing.ClassVar`, and attributes that are + neither annotated nor set to an `attr.ib` are **ignored**. + + .. warning:: + For features that use the attribute name to create decorators (e.g. + `validators `), you still *must* assign `attr.ib` to + them. Otherwise Python will either not find the name or try to use + the default value to call e.g. ``validator`` on it. + + These errors can be quite confusing and probably the most common bug + report on our bug tracker. + + :param bool kw_only: Make all attributes keyword-only + in the generated ``__init__`` (if ``init`` is ``False``, this + parameter is ignored). + :param bool cache_hash: Ensure that the object's hash code is computed + only once and stored on the object. If this is set to ``True``, + hashing must be either explicitly or implicitly enabled for this + class. If the hash code is cached, avoid any reassignments of + fields involved in hash code computation or mutations of the objects + those fields point to after object creation. 
If such changes occur, + the behavior of the object's hash code is undefined. + :param bool auto_exc: If the class subclasses `BaseException` + (which implicitly includes any subclass of any exception), the + following happens to behave like a well-behaved Python exceptions + class: + + - the values for *eq*, *order*, and *hash* are ignored and the + instances compare and hash by the instance's ids (N.B. ``attrs`` will + *not* remove existing implementations of ``__hash__`` or the equality + methods. It just won't add own ones.), + - all attributes that are either passed into ``__init__`` or have a + default value are additionally available as a tuple in the ``args`` + attribute, + - the value of *str* is ignored leaving ``__str__`` to base classes. + :param bool collect_by_mro: Setting this to `True` fixes the way ``attrs`` + collects attributes from base classes. The default behavior is + incorrect in certain cases of multiple inheritance. It should be on by + default but is kept off for backward-compatibility. + + See issue `#428 `_ for + more details. + + :param Optional[bool] getstate_setstate: + .. note:: + This is usually only interesting for slotted classes and you should + probably just set *auto_detect* to `True`. + + If `True`, ``__getstate__`` and + ``__setstate__`` are generated and attached to the class. This is + necessary for slotted classes to be pickleable. If left `None`, it's + `True` by default for slotted classes and ``False`` for dict classes. + + If *auto_detect* is `True`, and *getstate_setstate* is left `None`, + and **either** ``__getstate__`` or ``__setstate__`` is detected directly + on the class (i.e. not inherited), it is set to `False` (this is usually + what you want). + + :param on_setattr: A callable that is run whenever the user attempts to set + an attribute (either by assignment like ``i.x = 42`` or by using + `setattr` like ``setattr(i, "x", 42)``). 
It receives the same arguments + as validators: the instance, the attribute that is being modified, and + the new value. + + If no exception is raised, the attribute is set to the return value of + the callable. + + If a list of callables is passed, they're automatically wrapped in an + `attrs.setters.pipe`. + :type on_setattr: `callable`, or a list of callables, or `None`, or + `attrs.setters.NO_OP` + + :param Optional[callable] field_transformer: + A function that is called with the original class object and all + fields right before ``attrs`` finalizes the class. You can use + this, e.g., to automatically add converters or validators to + fields based on their types. See `transform-fields` for more details. + + :param bool match_args: + If `True` (default), set ``__match_args__`` on the class to support + :pep:`634` (Structural Pattern Matching). It is a tuple of all + non-keyword-only ``__init__`` parameter names on Python 3.10 and later. + Ignored on older Python versions. + + .. versionadded:: 16.0.0 *slots* + .. versionadded:: 16.1.0 *frozen* + .. versionadded:: 16.3.0 *str* + .. versionadded:: 16.3.0 Support for ``__attrs_post_init__``. + .. versionchanged:: 17.1.0 + *hash* supports ``None`` as value which is also the default now. + .. versionadded:: 17.3.0 *auto_attribs* + .. versionchanged:: 18.1.0 + If *these* is passed, no attributes are deleted from the class body. + .. versionchanged:: 18.1.0 If *these* is ordered, the order is retained. + .. versionadded:: 18.2.0 *weakref_slot* + .. deprecated:: 18.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now raise a + `DeprecationWarning` if the classes compared are subclasses of + each other. ``__eq`` and ``__ne__`` never tried to compared subclasses + to each other. + .. versionchanged:: 19.2.0 + ``__lt__``, ``__le__``, ``__gt__``, and ``__ge__`` now do not consider + subclasses comparable anymore. + .. versionadded:: 18.2.0 *kw_only* + .. versionadded:: 18.2.0 *cache_hash* + .. 
versionadded:: 19.1.0 *auto_exc* + .. deprecated:: 19.2.0 *cmp* Removal on or after 2021-06-01. + .. versionadded:: 19.2.0 *eq* and *order* + .. versionadded:: 20.1.0 *auto_detect* + .. versionadded:: 20.1.0 *collect_by_mro* + .. versionadded:: 20.1.0 *getstate_setstate* + .. versionadded:: 20.1.0 *on_setattr* + .. versionadded:: 20.3.0 *field_transformer* + .. versionchanged:: 21.1.0 + ``init=False`` injects ``__attrs_init__`` + .. versionchanged:: 21.1.0 Support for ``__attrs_pre_init__`` + .. versionchanged:: 21.1.0 *cmp* undeprecated + .. versionadded:: 21.3.0 *match_args* + .. versionadded:: 22.2.0 + *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance). + """ + eq_, order_ = _determine_attrs_eq_order(cmp, eq, order, None) + + # unsafe_hash takes precedence due to PEP 681. + if unsafe_hash is not None: + hash = unsafe_hash + + if isinstance(on_setattr, (list, tuple)): + on_setattr = setters.pipe(*on_setattr) + + def wrap(cls): + is_frozen = frozen or _has_frozen_base_class(cls) + is_exc = auto_exc is True and issubclass(cls, BaseException) + has_own_setattr = auto_detect and _has_own_attribute( + cls, "__setattr__" + ) + + if has_own_setattr and is_frozen: + raise ValueError("Can't freeze a class with a custom __setattr__.") + + builder = _ClassBuilder( + cls, + these, + slots, + is_frozen, + weakref_slot, + _determine_whether_to_implement( + cls, + getstate_setstate, + auto_detect, + ("__getstate__", "__setstate__"), + default=slots, + ), + auto_attribs, + kw_only, + cache_hash, + is_exc, + collect_by_mro, + on_setattr, + has_own_setattr, + field_transformer, + ) + if _determine_whether_to_implement( + cls, repr, auto_detect, ("__repr__",) + ): + builder.add_repr(repr_ns) + if str is True: + builder.add_str() + + eq = _determine_whether_to_implement( + cls, eq_, auto_detect, ("__eq__", "__ne__") + ) + if not is_exc and eq is True: + builder.add_eq() + if not is_exc and _determine_whether_to_implement( + cls, order_, auto_detect, ("__lt__", 
"__le__", "__gt__", "__ge__") + ): + builder.add_order() + + builder.add_setattr() + + nonlocal hash + if ( + hash is None + and auto_detect is True + and _has_own_attribute(cls, "__hash__") + ): + hash = False + + if hash is not True and hash is not False and hash is not None: + # Can't use `hash in` because 1 == True for example. + raise TypeError( + "Invalid value for hash. Must be True, False, or None." + ) + elif hash is False or (hash is None and eq is False) or is_exc: + # Don't do anything. Should fall back to __object__'s __hash__ + # which is by id. + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " hashing must be either explicitly or implicitly " + "enabled." + ) + elif hash is True or ( + hash is None and eq is True and is_frozen is True + ): + # Build a __hash__ if told so, or if it's safe. + builder.add_hash() + else: + # Raise TypeError on attempts to hash. + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " hashing must be either explicitly or implicitly " + "enabled." + ) + builder.make_unhashable() + + if _determine_whether_to_implement( + cls, init, auto_detect, ("__init__",) + ): + builder.add_init() + else: + builder.add_attrs_init() + if cache_hash: + raise TypeError( + "Invalid value for cache_hash. To use hash caching," + " init must be True." + ) + + if ( + PY310 + and match_args + and not _has_own_attribute(cls, "__match_args__") + ): + builder.add_match_args() + + return builder.build_class() + + # maybe_cls's type depends on the usage of the decorator. It's a class + # if it's used as `@attrs` but ``None`` if used as `@attrs()`. + if maybe_cls is None: + return wrap + else: + return wrap(maybe_cls) + + +_attrs = attrs +""" +Internal alias so we can use it in functions that take an argument called +*attrs*. +""" + + +def _has_frozen_base_class(cls): + """ + Check whether *cls* has a frozen ancestor by looking at its + __setattr__. 
+ """ + return cls.__setattr__ is _frozen_setattrs + + +def _generate_unique_filename(cls, func_name): + """ + Create a "filename" suitable for a function being generated. + """ + return ( + f"" + ) + + +def _make_hash(cls, attrs, frozen, cache_hash): + attrs = tuple( + a for a in attrs if a.hash is True or (a.hash is None and a.eq is True) + ) + + tab = " " + + unique_filename = _generate_unique_filename(cls, "hash") + type_hash = hash(unique_filename) + # If eq is custom generated, we need to include the functions in globs + globs = {} + + hash_def = "def __hash__(self" + hash_func = "hash((" + closing_braces = "))" + if not cache_hash: + hash_def += "):" + else: + hash_def += ", *" + + hash_def += ( + ", _cache_wrapper=" + + "__import__('attr._make')._make._CacheHashWrapper):" + ) + hash_func = "_cache_wrapper(" + hash_func + closing_braces += ")" + + method_lines = [hash_def] + + def append_hash_computation_lines(prefix, indent): + """ + Generate the code for actually computing the hash code. 
+ Below this will either be returned directly or used to compute + a value which is then cached, depending on the value of cache_hash + """ + + method_lines.extend( + [ + indent + prefix + hash_func, + indent + f" {type_hash},", + ] + ) + + for a in attrs: + if a.eq_key: + cmp_name = f"_{a.name}_key" + globs[cmp_name] = a.eq_key + method_lines.append( + indent + f" {cmp_name}(self.{a.name})," + ) + else: + method_lines.append(indent + f" self.{a.name},") + + method_lines.append(indent + " " + closing_braces) + + if cache_hash: + method_lines.append(tab + f"if self.{_hash_cache_field} is None:") + if frozen: + append_hash_computation_lines( + f"object.__setattr__(self, '{_hash_cache_field}', ", tab * 2 + ) + method_lines.append(tab * 2 + ")") # close __setattr__ + else: + append_hash_computation_lines( + f"self.{_hash_cache_field} = ", tab * 2 + ) + method_lines.append(tab + f"return self.{_hash_cache_field}") + else: + append_hash_computation_lines("return ", tab) + + script = "\n".join(method_lines) + return _make_method("__hash__", script, unique_filename, globs) + + +def _add_hash(cls, attrs): + """ + Add a hash method to *cls*. + """ + cls.__hash__ = _make_hash(cls, attrs, frozen=False, cache_hash=False) + return cls + + +def _make_ne(): + """ + Create __ne__ method. + """ + + def __ne__(self, other): + """ + Check equality and either forward a NotImplemented or + return the result negated. + """ + result = self.__eq__(other) + if result is NotImplemented: + return NotImplemented + + return not result + + return __ne__ + + +def _make_eq(cls, attrs): + """ + Create __eq__ method for *cls* with *attrs*. + """ + attrs = [a for a in attrs if a.eq] + + unique_filename = _generate_unique_filename(cls, "eq") + lines = [ + "def __eq__(self, other):", + " if other.__class__ is not self.__class__:", + " return NotImplemented", + ] + + # We can't just do a big self.x = other.x and... clause due to + # irregularities like nan == nan is false but (nan,) == (nan,) is true. 
+ globs = {} + if attrs: + lines.append(" return (") + others = [" ) == ("] + for a in attrs: + if a.eq_key: + cmp_name = f"_{a.name}_key" + # Add the key function to the global namespace + # of the evaluated function. + globs[cmp_name] = a.eq_key + lines.append(f" {cmp_name}(self.{a.name}),") + others.append(f" {cmp_name}(other.{a.name}),") + else: + lines.append(f" self.{a.name},") + others.append(f" other.{a.name},") + + lines += others + [" )"] + else: + lines.append(" return True") + + script = "\n".join(lines) + + return _make_method("__eq__", script, unique_filename, globs) + + +def _make_order(cls, attrs): + """ + Create ordering methods for *cls* with *attrs*. + """ + attrs = [a for a in attrs if a.order] + + def attrs_to_tuple(obj): + """ + Save us some typing. + """ + return tuple( + key(value) if key else value + for value, key in ( + (getattr(obj, a.name), a.order_key) for a in attrs + ) + ) + + def __lt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) < attrs_to_tuple(other) + + return NotImplemented + + def __le__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) <= attrs_to_tuple(other) + + return NotImplemented + + def __gt__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) > attrs_to_tuple(other) + + return NotImplemented + + def __ge__(self, other): + """ + Automatically created by attrs. + """ + if other.__class__ is self.__class__: + return attrs_to_tuple(self) >= attrs_to_tuple(other) + + return NotImplemented + + return __lt__, __le__, __gt__, __ge__ + + +def _add_eq(cls, attrs=None): + """ + Add equality methods to *cls* with *attrs*. 
+ """ + if attrs is None: + attrs = cls.__attrs_attrs__ + + cls.__eq__ = _make_eq(cls, attrs) + cls.__ne__ = _make_ne() + + return cls + + +def _make_repr(attrs, ns, cls): + unique_filename = _generate_unique_filename(cls, "repr") + # Figure out which attributes to include, and which function to use to + # format them. The a.repr value can be either bool or a custom + # callable. + attr_names_with_reprs = tuple( + (a.name, (repr if a.repr is True else a.repr), a.init) + for a in attrs + if a.repr is not False + ) + globs = { + name + "_repr": r for name, r, _ in attr_names_with_reprs if r != repr + } + globs["_compat"] = _compat + globs["AttributeError"] = AttributeError + globs["NOTHING"] = NOTHING + attribute_fragments = [] + for name, r, i in attr_names_with_reprs: + accessor = ( + "self." + name if i else 'getattr(self, "' + name + '", NOTHING)' + ) + fragment = ( + "%s={%s!r}" % (name, accessor) + if r == repr + else "%s={%s_repr(%s)}" % (name, name, accessor) + ) + attribute_fragments.append(fragment) + repr_fragment = ", ".join(attribute_fragments) + + if ns is None: + cls_name_fragment = '{self.__class__.__qualname__.rsplit(">.", 1)[-1]}' + else: + cls_name_fragment = ns + ".{self.__class__.__name__}" + + lines = [ + "def __repr__(self):", + " try:", + " already_repring = _compat.repr_context.already_repring", + " except AttributeError:", + " already_repring = {id(self),}", + " _compat.repr_context.already_repring = already_repring", + " else:", + " if id(self) in already_repring:", + " return '...'", + " else:", + " already_repring.add(id(self))", + " try:", + f" return f'{cls_name_fragment}({repr_fragment})'", + " finally:", + " already_repring.remove(id(self))", + ] + + return _make_method( + "__repr__", "\n".join(lines), unique_filename, globs=globs + ) + + +def _add_repr(cls, ns=None, attrs=None): + """ + Add a repr method to *cls*. 
def fields(cls):
    """
    Return the tuple of ``attrs`` attributes for a class.

    The tuple also allows accessing the fields by their names (see below for
    examples).

    :param type cls: Class to introspect.

    :raise TypeError: If *cls* is not a class.
    :raise attr.exceptions.NotAnAttrsClassError: If *cls* is not an ``attrs``
        class.

    :rtype: tuple (with name accessors) of `attrs.Attribute`

    .. versionchanged:: 16.2.0 Returned tuple allows accessing the fields
       by name.
    """
    if not isinstance(cls, type):
        raise TypeError("Passed object must be a class.")

    found = getattr(cls, "__attrs_attrs__", None)
    if found is not None:
        return found

    raise NotAnAttrsClassError(f"{cls!r} is not an attrs-decorated class.")
+ """ + if _config._run_validators is False: + return + + for a in fields(inst.__class__): + v = a.validator + if v is not None: + v(inst, a, getattr(inst, a.name)) + + +def _is_slot_cls(cls): + return "__slots__" in cls.__dict__ + + +def _is_slot_attr(a_name, base_attr_map): + """ + Check if the attribute name comes from a slot class. + """ + return a_name in base_attr_map and _is_slot_cls(base_attr_map[a_name]) + + +def _make_init( + cls, + attrs, + pre_init, + post_init, + frozen, + slots, + cache_hash, + base_attr_map, + is_exc, + cls_on_setattr, + attrs_init, +): + has_cls_on_setattr = ( + cls_on_setattr is not None and cls_on_setattr is not setters.NO_OP + ) + + if frozen and has_cls_on_setattr: + raise ValueError("Frozen classes can't use on_setattr.") + + needs_cached_setattr = cache_hash or frozen + filtered_attrs = [] + attr_dict = {} + for a in attrs: + if not a.init and a.default is NOTHING: + continue + + filtered_attrs.append(a) + attr_dict[a.name] = a + + if a.on_setattr is not None: + if frozen is True: + raise ValueError("Frozen classes can't use on_setattr.") + + needs_cached_setattr = True + elif has_cls_on_setattr and a.on_setattr is not setters.NO_OP: + needs_cached_setattr = True + + unique_filename = _generate_unique_filename(cls, "init") + + script, globs, annotations = _attrs_to_init_script( + filtered_attrs, + frozen, + slots, + pre_init, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_cls_on_setattr, + attrs_init, + ) + if cls.__module__ in sys.modules: + # This makes typing.get_type_hints(CLS.__init__) resolve string types. + globs.update(sys.modules[cls.__module__].__dict__) + + globs.update({"NOTHING": NOTHING, "attr_dict": attr_dict}) + + if needs_cached_setattr: + # Save the lookup overhead in __init__ if we need to circumvent + # setattr hooks. 
+ globs["_cached_setattr_get"] = _obj_setattr.__get__ + + init = _make_method( + "__attrs_init__" if attrs_init else "__init__", + script, + unique_filename, + globs, + ) + init.__annotations__ = annotations + + return init + + +def _setattr(attr_name, value_var, has_on_setattr): + """ + Use the cached object.setattr to set *attr_name* to *value_var*. + """ + return f"_setattr('{attr_name}', {value_var})" + + +def _setattr_with_converter(attr_name, value_var, has_on_setattr): + """ + Use the cached object.setattr to set *attr_name* to *value_var*, but run + its converter first. + """ + return "_setattr('%s', %s(%s))" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + +def _assign(attr_name, value, has_on_setattr): + """ + Unless *attr_name* has an on_setattr hook, use normal assignment. Otherwise + relegate to _setattr. + """ + if has_on_setattr: + return _setattr(attr_name, value, True) + + return f"self.{attr_name} = {value}" + + +def _assign_with_converter(attr_name, value_var, has_on_setattr): + """ + Unless *attr_name* has an on_setattr hook, use normal assignment after + conversion. Otherwise relegate to _setattr_with_converter. + """ + if has_on_setattr: + return _setattr_with_converter(attr_name, value_var, True) + + return "self.%s = %s(%s)" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + +def _attrs_to_init_script( + attrs, + frozen, + slots, + pre_init, + post_init, + cache_hash, + base_attr_map, + is_exc, + needs_cached_setattr, + has_cls_on_setattr, + attrs_init, +): + """ + Return a script of an initializer for *attrs* and a dict of globals. + + The globals are expected by the generated script. + + If *frozen* is True, we cannot set the attributes directly so we use + a cached ``object.__setattr__``. 
+ """ + lines = [] + if pre_init: + lines.append("self.__attrs_pre_init__()") + + if needs_cached_setattr: + lines.append( + # Circumvent the __setattr__ descriptor to save one lookup per + # assignment. + # Note _setattr will be used again below if cache_hash is True + "_setattr = _cached_setattr_get(self)" + ) + + if frozen is True: + if slots is True: + fmt_setter = _setattr + fmt_setter_with_converter = _setattr_with_converter + else: + # Dict frozen classes assign directly to __dict__. + # But only if the attribute doesn't come from an ancestor slot + # class. + # Note _inst_dict will be used again below if cache_hash is True + lines.append("_inst_dict = self.__dict__") + + def fmt_setter(attr_name, value_var, has_on_setattr): + if _is_slot_attr(attr_name, base_attr_map): + return _setattr(attr_name, value_var, has_on_setattr) + + return f"_inst_dict['{attr_name}'] = {value_var}" + + def fmt_setter_with_converter( + attr_name, value_var, has_on_setattr + ): + if has_on_setattr or _is_slot_attr(attr_name, base_attr_map): + return _setattr_with_converter( + attr_name, value_var, has_on_setattr + ) + + return "_inst_dict['%s'] = %s(%s)" % ( + attr_name, + _init_converter_pat % (attr_name,), + value_var, + ) + + else: + # Not frozen. + fmt_setter = _assign + fmt_setter_with_converter = _assign_with_converter + + args = [] + kw_only_args = [] + attrs_to_validate = [] + + # This is a dictionary of names to validator and converter callables. + # Injecting this into __init__ globals lets us avoid lookups. 
+ names_for_globals = {} + annotations = {"return": None} + + for a in attrs: + if a.validator: + attrs_to_validate.append(a) + + attr_name = a.name + has_on_setattr = a.on_setattr is not None or ( + a.on_setattr is not setters.NO_OP and has_cls_on_setattr + ) + # a.alias is set to maybe-mangled attr_name in _ClassBuilder if not + # explicitly provided + arg_name = a.alias + + has_factory = isinstance(a.default, Factory) + if has_factory and a.default.takes_self: + maybe_self = "self" + else: + maybe_self = "" + + if a.init is False: + if has_factory: + init_factory_name = _init_factory_pat % (a.name,) + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + init_factory_name + f"({maybe_self})", + has_on_setattr, + ) + ) + conv_name = _init_converter_pat % (a.name,) + names_for_globals[conv_name] = a.converter + else: + lines.append( + fmt_setter( + attr_name, + init_factory_name + f"({maybe_self})", + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + else: + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, + f"attr_dict['{attr_name}'].default", + has_on_setattr, + ) + ) + conv_name = _init_converter_pat % (a.name,) + names_for_globals[conv_name] = a.converter + else: + lines.append( + fmt_setter( + attr_name, + f"attr_dict['{attr_name}'].default", + has_on_setattr, + ) + ) + elif a.default is not NOTHING and not has_factory: + arg = f"{arg_name}=attr_dict['{attr_name}'].default" + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + elif has_factory: + arg = f"{arg_name}=NOTHING" + if a.kw_only: + kw_only_args.append(arg) + else: + args.append(arg) + lines.append(f"if {arg_name} 
is not NOTHING:") + + init_factory_name = _init_factory_pat % (a.name,) + if a.converter is not None: + lines.append( + " " + + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter_with_converter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append( + " " + fmt_setter(attr_name, arg_name, has_on_setattr) + ) + lines.append("else:") + lines.append( + " " + + fmt_setter( + attr_name, + init_factory_name + "(" + maybe_self + ")", + has_on_setattr, + ) + ) + names_for_globals[init_factory_name] = a.default.factory + else: + if a.kw_only: + kw_only_args.append(arg_name) + else: + args.append(arg_name) + + if a.converter is not None: + lines.append( + fmt_setter_with_converter( + attr_name, arg_name, has_on_setattr + ) + ) + names_for_globals[ + _init_converter_pat % (a.name,) + ] = a.converter + else: + lines.append(fmt_setter(attr_name, arg_name, has_on_setattr)) + + if a.init is True: + if a.type is not None and a.converter is None: + annotations[arg_name] = a.type + elif a.converter is not None: + # Try to get the type from the converter. + t = _AnnotationExtractor(a.converter).get_first_param_type() + if t: + annotations[arg_name] = t + + if attrs_to_validate: # we can skip this if there are no validators. + names_for_globals["_config"] = _config + lines.append("if _config._run_validators is True:") + for a in attrs_to_validate: + val_name = "__attr_validator_" + a.name + attr_name = "__attr_" + a.name + lines.append(f" {val_name}(self, {attr_name}, self.{a.name})") + names_for_globals[val_name] = a.validator + names_for_globals[attr_name] = a + + if post_init: + lines.append("self.__attrs_post_init__()") + + # because this is set only after __attrs_post_init__ is called, a crash + # will result if post-init tries to access the hash code. 
This seemed + # preferable to setting this beforehand, in which case alteration to + # field values during post-init combined with post-init accessing the + # hash code would result in silent bugs. + if cache_hash: + if frozen: + if slots: + # if frozen and slots, then _setattr defined above + init_hash_cache = "_setattr('%s', %s)" + else: + # if frozen and not slots, then _inst_dict defined above + init_hash_cache = "_inst_dict['%s'] = %s" + else: + init_hash_cache = "self.%s = %s" + lines.append(init_hash_cache % (_hash_cache_field, "None")) + + # For exceptions we rely on BaseException.__init__ for proper + # initialization. + if is_exc: + vals = ",".join(f"self.{a.name}" for a in attrs if a.init) + + lines.append(f"BaseException.__init__(self, {vals})") + + args = ", ".join(args) + if kw_only_args: + args += "%s*, %s" % ( + ", " if args else "", # leading comma + ", ".join(kw_only_args), # kw_only args + ) + + return ( + "def %s(self, %s):\n %s\n" + % ( + ("__attrs_init__" if attrs_init else "__init__"), + args, + "\n ".join(lines) if lines else "pass", + ), + names_for_globals, + annotations, + ) + + +def _default_init_alias_for(name: str) -> str: + """ + The default __init__ parameter name for a field. + + This performs private-name adjustment via leading-unscore stripping, + and is the default value of Attribute.alias if not provided. + """ + + return name.lstrip("_") + + +class Attribute: + """ + *Read-only* representation of an attribute. + + The class has *all* arguments of `attr.ib` (except for ``factory`` + which is only syntactic sugar for ``default=Factory(...)`` plus the + following: + + - ``name`` (`str`): The name of the attribute. + - ``alias`` (`str`): The __init__ parameter name of the attribute, after + any explicit overrides and default private-attribute-name handling. + - ``inherited`` (`bool`): Whether or not that attribute has been inherited + from a base class. 
+ - ``eq_key`` and ``order_key`` (`typing.Callable` or `None`): The callables + that are used for comparing and ordering objects by this attribute, + respectively. These are set by passing a callable to `attr.ib`'s ``eq``, + ``order``, or ``cmp`` arguments. See also :ref:`comparison customization + `. + + Instances of this class are frequently used for introspection purposes + like: + + - `fields` returns a tuple of them. + - Validators get them passed as the first argument. + - The :ref:`field transformer ` hook receives a list of + them. + - The ``alias`` property exposes the __init__ parameter name of the field, + with any overrides and default private-attribute handling applied. + + + .. versionadded:: 20.1.0 *inherited* + .. versionadded:: 20.1.0 *on_setattr* + .. versionchanged:: 20.2.0 *inherited* is not taken into account for + equality checks and hashing anymore. + .. versionadded:: 21.1.0 *eq_key* and *order_key* + .. versionadded:: 22.2.0 *alias* + + For the full version history of the fields, see `attr.ib`. + """ + + __slots__ = ( + "name", + "default", + "validator", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "type", + "converter", + "kw_only", + "inherited", + "on_setattr", + "alias", + ) + + def __init__( + self, + name, + default, + validator, + repr, + cmp, # XXX: unused, remove along with other cmp code. + hash, + init, + inherited, + metadata=None, + type=None, + converter=None, + kw_only=False, + eq=None, + eq_key=None, + order=None, + order_key=None, + on_setattr=None, + alias=None, + ): + eq, eq_key, order, order_key = _determine_attrib_eq_order( + cmp, eq_key or eq, order_key or order, True + ) + + # Cache this descriptor here to speed things up later. + bound_setattr = _obj_setattr.__get__(self) + + # Despite the big red warning, people *do* instantiate `Attribute` + # themselves. 
+ bound_setattr("name", name) + bound_setattr("default", default) + bound_setattr("validator", validator) + bound_setattr("repr", repr) + bound_setattr("eq", eq) + bound_setattr("eq_key", eq_key) + bound_setattr("order", order) + bound_setattr("order_key", order_key) + bound_setattr("hash", hash) + bound_setattr("init", init) + bound_setattr("converter", converter) + bound_setattr( + "metadata", + ( + types.MappingProxyType(dict(metadata)) # Shallow copy + if metadata + else _empty_metadata_singleton + ), + ) + bound_setattr("type", type) + bound_setattr("kw_only", kw_only) + bound_setattr("inherited", inherited) + bound_setattr("on_setattr", on_setattr) + bound_setattr("alias", alias) + + def __setattr__(self, name, value): + raise FrozenInstanceError() + + @classmethod + def from_counting_attr(cls, name, ca, type=None): + # type holds the annotated value. deal with conflicts: + if type is None: + type = ca.type + elif ca.type is not None: + raise ValueError( + "Type annotation and type argument cannot both be present" + ) + inst_dict = { + k: getattr(ca, k) + for k in Attribute.__slots__ + if k + not in ( + "name", + "validator", + "default", + "type", + "inherited", + ) # exclude methods and deprecated alias + } + return cls( + name=name, + validator=ca._validator, + default=ca._default, + type=type, + cmp=None, + inherited=False, + **inst_dict, + ) + + # Don't use attr.evolve since fields(Attribute) doesn't work + def evolve(self, **changes): + """ + Copy *self* and apply *changes*. + + This works similarly to `attr.evolve` but that function does not work + with ``Attribute``. + + It is mainly meant to be used for `transform-fields`. + + .. versionadded:: 20.3.0 + """ + new = copy.copy(self) + + new._setattrs(changes.items()) + + return new + + # Don't use _add_pickle since fields(Attribute) doesn't work + def __getstate__(self): + """ + Play nice with pickle. 
+ """ + return tuple( + getattr(self, name) if name != "metadata" else dict(self.metadata) + for name in self.__slots__ + ) + + def __setstate__(self, state): + """ + Play nice with pickle. + """ + self._setattrs(zip(self.__slots__, state)) + + def _setattrs(self, name_values_pairs): + bound_setattr = _obj_setattr.__get__(self) + for name, value in name_values_pairs: + if name != "metadata": + bound_setattr(name, value) + else: + bound_setattr( + name, + types.MappingProxyType(dict(value)) + if value + else _empty_metadata_singleton, + ) + + +_a = [ + Attribute( + name=name, + default=NOTHING, + validator=None, + repr=True, + cmp=None, + eq=True, + order=False, + hash=(name != "metadata"), + init=True, + inherited=False, + alias=_default_init_alias_for(name), + ) + for name in Attribute.__slots__ +] + +Attribute = _add_hash( + _add_eq( + _add_repr(Attribute, attrs=_a), + attrs=[a for a in _a if a.name != "inherited"], + ), + attrs=[a for a in _a if a.hash and a.name != "inherited"], +) + + +class _CountingAttr: + """ + Intermediate representation of attributes that uses a counter to preserve + the order in which the attributes have been defined. + + *Internal* data structure of the attrs library. Running into is most + likely the result of a bug like a forgotten `@attr.s` decorator. 
+ """ + + __slots__ = ( + "counter", + "_default", + "repr", + "eq", + "eq_key", + "order", + "order_key", + "hash", + "init", + "metadata", + "_validator", + "converter", + "type", + "kw_only", + "on_setattr", + "alias", + ) + __attrs_attrs__ = tuple( + Attribute( + name=name, + alias=_default_init_alias_for(name), + default=NOTHING, + validator=None, + repr=True, + cmp=None, + hash=True, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ) + for name in ( + "counter", + "_default", + "repr", + "eq", + "order", + "hash", + "init", + "on_setattr", + "alias", + ) + ) + ( + Attribute( + name="metadata", + alias="metadata", + default=None, + validator=None, + repr=True, + cmp=None, + hash=False, + init=True, + kw_only=False, + eq=True, + eq_key=None, + order=False, + order_key=None, + inherited=False, + on_setattr=None, + ), + ) + cls_counter = 0 + + def __init__( + self, + default, + validator, + repr, + cmp, + hash, + init, + converter, + metadata, + type, + kw_only, + eq, + eq_key, + order, + order_key, + on_setattr, + alias, + ): + _CountingAttr.cls_counter += 1 + self.counter = _CountingAttr.cls_counter + self._default = default + self._validator = validator + self.converter = converter + self.repr = repr + self.eq = eq + self.eq_key = eq_key + self.order = order + self.order_key = order_key + self.hash = hash + self.init = init + self.metadata = metadata + self.type = type + self.kw_only = kw_only + self.on_setattr = on_setattr + self.alias = alias + + def validator(self, meth): + """ + Decorator that adds *meth* to the list of validators. + + Returns *meth* unchanged. + + .. versionadded:: 17.1.0 + """ + if self._validator is None: + self._validator = meth + else: + self._validator = and_(self._validator, meth) + return meth + + def default(self, meth): + """ + Decorator that allows to set the default for an attribute. + + Returns *meth* unchanged. 
class Factory:
    """
    Stores a factory callable.

    If passed as the default value to `attrs.field`, the factory is used to
    generate a new value.

    :param callable factory: A callable that takes either none or exactly one
        mandatory positional argument depending on *takes_self*.
    :param bool takes_self: Pass the partially initialized instance that is
        being initialized as a positional argument.

    .. versionadded:: 17.1.0 *takes_self*
    """

    __slots__ = ("factory", "takes_self")

    def __init__(self, factory, takes_self=False):
        """
        `Factory` is part of the default machinery so if we want a default
        value here, we have to implement it ourselves.
        """
        self.factory = factory
        self.takes_self = takes_self

    def __getstate__(self):
        # Slotted classes have no instance __dict__, so pickling needs an
        # explicit state tuple, one entry per slot.
        return tuple(getattr(self, slot) for slot in self.__slots__)

    def __setstate__(self, state):
        # Restore each slot from the state tuple produced by __getstate__.
        for slot, value in zip(self.__slots__, state):
            setattr(self, slot, value)
Otherwise the order of the definition of the attributes is + used. + :type attrs: `list` or `dict` + + :param tuple bases: Classes that the new class will subclass. + + :param attributes_arguments: Passed unmodified to `attr.s`. + + :return: A new class with *attrs*. + :rtype: type + + .. versionadded:: 17.1.0 *bases* + .. versionchanged:: 18.1.0 If *attrs* is ordered, the order is retained. + """ + if isinstance(attrs, dict): + cls_dict = attrs + elif isinstance(attrs, (list, tuple)): + cls_dict = {a: attrib() for a in attrs} + else: + raise TypeError("attrs argument must be a dict or a list.") + + pre_init = cls_dict.pop("__attrs_pre_init__", None) + post_init = cls_dict.pop("__attrs_post_init__", None) + user_init = cls_dict.pop("__init__", None) + + body = {} + if pre_init is not None: + body["__attrs_pre_init__"] = pre_init + if post_init is not None: + body["__attrs_post_init__"] = post_init + if user_init is not None: + body["__init__"] = user_init + + type_ = types.new_class(name, bases, {}, lambda ns: ns.update(body)) + + # For pickling to work, the __module__ variable needs to be set to the + # frame where the class is created. Bypass this step in environments where + # sys._getframe is not defined (Jython for example) or sys._getframe is not + # defined for arguments greater than 0 (IronPython). + try: + type_.__module__ = sys._getframe(1).f_globals.get( + "__name__", "__main__" + ) + except (AttributeError, ValueError): + pass + + # We do it here for proper warnings with meaningful stacklevel. + cmp = attributes_arguments.pop("cmp", None) + ( + attributes_arguments["eq"], + attributes_arguments["order"], + ) = _determine_attrs_eq_order( + cmp, + attributes_arguments.get("eq"), + attributes_arguments.get("order"), + True, + ) + + return _attrs(these=cls_dict, **attributes_arguments)(type_) + + +# These are required by within this module so we define them here and merely +# import into .validators / .converters. 
def and_(*validators):
    """
    Compose multiple validators into a single one.

    The returned validator runs every wrapped validator when called on a
    value.

    :param callables validators: Arbitrary number of validators.

    .. versionadded:: 17.1.0
    """
    flattened = []
    for v in validators:
        # Flatten nested _AndValidators so the composition stays one level
        # deep no matter how and_() calls are combined.
        if isinstance(v, _AndValidator):
            flattened.extend(v._validators)
        else:
            flattened.append(v)

    return _AndValidator(tuple(flattened))
def define(
    maybe_cls=None,
    *,
    these=None,
    repr=None,
    unsafe_hash=None,
    hash=None,
    init=None,
    slots=True,
    frozen=False,
    weakref_slot=True,
    str=False,
    auto_attribs=None,
    kw_only=False,
    cache_hash=False,
    auto_exc=True,
    eq=None,
    order=False,
    auto_detect=True,
    getstate_setstate=None,
    on_setattr=None,
    field_transformer=None,
    match_args=True,
):
    r"""
    Define an ``attrs`` class.

    Differences to the classic `attr.s` that it uses underneath:

    - Automatically detect whether or not *auto_attribs* should be `True`
      (c.f. *auto_attribs* parameter).
    - If *frozen* is `False`, run converters and validators when setting an
      attribute by default.
    - *slots=True*

    .. caution::

       Usually this has only upsides and few visible effects in everyday
       programming. But it *can* lead to some surprising behaviors, so
       please make sure to read :term:`slotted classes`.
    - *auto_exc=True*
    - *auto_detect=True*
    - *order=False*
    - Some options that were only relevant on Python 2 or were kept around
      for backwards-compatibility have been removed.

    Please note that these are all defaults and you can change them as you
    wish.

    :param Optional[bool] auto_attribs: If set to `True` or `False`, it
        behaves exactly like `attr.s`. If left `None`, `attr.s` will try to
        guess:

        1. If any attributes are annotated and no unannotated
           `attrs.fields`\ s are found, it assumes *auto_attribs=True*.
        2. Otherwise it assumes *auto_attribs=False* and tries to collect
           `attrs.fields`\ s.

    For now, please refer to `attr.s` for the rest of the parameters.

    .. versionadded:: 20.1.0
    .. versionchanged:: 21.3.0 Converters are also run ``on_setattr``.
    .. versionadded:: 22.2.0
       *unsafe_hash* as an alias for *hash* (for :pep:`681` compliance).
    """

    def apply_attrs(cls, auto_attribs):
        # Forward everything to the classic decorator with the next-gen
        # defaults (collect_by_mro=True and the resolved auto_attribs).
        return attrs(
            maybe_cls=cls,
            these=these,
            repr=repr,
            hash=hash,
            unsafe_hash=unsafe_hash,
            init=init,
            slots=slots,
            frozen=frozen,
            weakref_slot=weakref_slot,
            str=str,
            auto_attribs=auto_attribs,
            kw_only=kw_only,
            cache_hash=cache_hash,
            auto_exc=auto_exc,
            eq=eq,
            order=order,
            auto_detect=auto_detect,
            collect_by_mro=True,
            getstate_setstate=getstate_setstate,
            on_setattr=on_setattr,
            field_transformer=field_transformer,
            match_args=match_args,
        )

    def wrap(cls):
        """
        Making this a wrapper ensures this code runs during class creation.

        We also ensure that frozen-ness of classes is inherited.
        """
        nonlocal frozen, on_setattr

        explicit_on_setattr = on_setattr not in (None, setters.NO_OP)

        # By default, mutable classes convert & validate on setattr.
        if frozen is False and on_setattr is None:
            on_setattr = _ng_default_on_setattr

        # However, if we subclass a frozen class, we inherit the
        # immutability and disable on_setattr.
        for base_cls in cls.__bases__:
            if base_cls.__setattr__ is _frozen_setattrs:
                if explicit_on_setattr:
                    raise ValueError(
                        "Frozen classes can't use on_setattr "
                        "(frozen-ness was inherited)."
                    )
                on_setattr = setters.NO_OP
                break

        if auto_attribs is not None:
            return apply_attrs(cls, auto_attribs)

        # Guess auto_attribs: prefer annotated fields, fall back to the
        # classic (unannotated attr.ib) collection strategy.
        try:
            return apply_attrs(cls, True)
        except UnannotatedAttributeError:
            return apply_attrs(cls, False)

    # `@define` passes the class directly; `@define()` passes None and the
    # class arrives later through the returned wrapper.
    return wrap if maybe_cls is None else wrap(maybe_cls)


mutable = define
frozen = partial(define, frozen=True, on_setattr=None)


def field(
    *,
    default=NOTHING,
    validator=None,
    repr=True,
    hash=None,
    init=True,
    metadata=None,
    converter=None,
    factory=None,
    kw_only=False,
    eq=None,
    order=None,
    on_setattr=None,
    alias=None,
):
    """
    Identical to `attr.ib`, except keyword-only and with some arguments
    removed.

    .. versionadded:: 20.1.0
    """
    return attrib(
        default=default,
        validator=validator,
        repr=repr,
        hash=hash,
        init=init,
        metadata=metadata,
        converter=converter,
        factory=factory,
        kw_only=kw_only,
        eq=eq,
        order=order,
        on_setattr=on_setattr,
        alias=alias,
    )


def asdict(inst, *, recurse=True, filter=None, value_serializer=None):
    """
    Same as `attr.asdict`, except that collections types are always
    retained and dict is always used as *dict_factory*.

    .. versionadded:: 21.3.0
    """
    return _asdict(
        inst=inst,
        recurse=recurse,
        filter=filter,
        value_serializer=value_serializer,
        retain_collection_types=True,
    )
versionadded:: 21.3.0 + """ + return _astuple( + inst=inst, recurse=recurse, filter=filter, retain_collection_types=True + ) diff --git a/pythonFiles/lib/jedilsp/attr/_typing_compat.pyi b/pythonFiles/lib/jedilsp/attr/_typing_compat.pyi new file mode 100644 index 0000000000000..ca7b71e906a28 --- /dev/null +++ b/pythonFiles/lib/jedilsp/attr/_typing_compat.pyi @@ -0,0 +1,15 @@ +from typing import Any, ClassVar, Protocol + +# MYPY is a special constant in mypy which works the same way as `TYPE_CHECKING`. +MYPY = False + +if MYPY: + # A protocol to be able to statically accept an attrs class. + class AttrsInstance_(Protocol): + __attrs_attrs__: ClassVar[Any] + +else: + # For type checkers without plug-in support use an empty protocol that + # will (hopefully) be combined into a union. + class AttrsInstance_(Protocol): + pass diff --git a/pythonFiles/lib/jedilsp/attr/_version_info.py b/pythonFiles/lib/jedilsp/attr/_version_info.py new file mode 100644 index 0000000000000..51a1312f9759f --- /dev/null +++ b/pythonFiles/lib/jedilsp/attr/_version_info.py @@ -0,0 +1,86 @@ +# SPDX-License-Identifier: MIT + + +from functools import total_ordering + +from ._funcs import astuple +from ._make import attrib, attrs + + +@total_ordering +@attrs(eq=False, order=False, slots=True, frozen=True) +class VersionInfo: + """ + A version object that can be compared to tuple of length 1--4: + + >>> attr.VersionInfo(19, 1, 0, "final") <= (19, 2) + True + >>> attr.VersionInfo(19, 1, 0, "final") < (19, 1, 1) + True + >>> vi = attr.VersionInfo(19, 2, 0, "final") + >>> vi < (19, 1, 1) + False + >>> vi < (19,) + False + >>> vi == (19, 2,) + True + >>> vi == (19, 2, 1) + False + + .. versionadded:: 19.2 + """ + + year = attrib(type=int) + minor = attrib(type=int) + micro = attrib(type=int) + releaselevel = attrib(type=str) + + @classmethod + def _from_version_string(cls, s): + """ + Parse *s* and return a _VersionInfo. 
+ """ + v = s.split(".") + if len(v) == 3: + v.append("final") + + return cls( + year=int(v[0]), minor=int(v[1]), micro=int(v[2]), releaselevel=v[3] + ) + + def _ensure_tuple(self, other): + """ + Ensure *other* is a tuple of a valid length. + + Returns a possibly transformed *other* and ourselves as a tuple of + the same length as *other*. + """ + + if self.__class__ is other.__class__: + other = astuple(other) + + if not isinstance(other, tuple): + raise NotImplementedError + + if not (1 <= len(other) <= 4): + raise NotImplementedError + + return astuple(self)[: len(other)], other + + def __eq__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + return us == them + + def __lt__(self, other): + try: + us, them = self._ensure_tuple(other) + except NotImplementedError: + return NotImplemented + + # Since alphabetically "dev0" < "final" < "post1" < "post2", we don't + # have to do anything special with releaselevel for now. + return us < them diff --git a/pythonFiles/lib/jedilsp/attr/_version_info.pyi b/pythonFiles/lib/jedilsp/attr/_version_info.pyi new file mode 100644 index 0000000000000..45ced08633778 --- /dev/null +++ b/pythonFiles/lib/jedilsp/attr/_version_info.pyi @@ -0,0 +1,9 @@ +class VersionInfo: + @property + def year(self) -> int: ... + @property + def minor(self) -> int: ... + @property + def micro(self) -> int: ... + @property + def releaselevel(self) -> str: ... diff --git a/pythonFiles/lib/jedilsp/attr/converters.py b/pythonFiles/lib/jedilsp/attr/converters.py new file mode 100644 index 0000000000000..4cada106b01c5 --- /dev/null +++ b/pythonFiles/lib/jedilsp/attr/converters.py @@ -0,0 +1,144 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly useful converters. 
+""" + + +import typing + +from ._compat import _AnnotationExtractor +from ._make import NOTHING, Factory, pipe + + +__all__ = [ + "default_if_none", + "optional", + "pipe", + "to_bool", +] + + +def optional(converter): + """ + A converter that allows an attribute to be optional. An optional attribute + is one which can be set to ``None``. + + Type annotations will be inferred from the wrapped converter's, if it + has any. + + :param callable converter: the converter that is used for non-``None`` + values. + + .. versionadded:: 17.1.0 + """ + + def optional_converter(val): + if val is None: + return None + return converter(val) + + xtr = _AnnotationExtractor(converter) + + t = xtr.get_first_param_type() + if t: + optional_converter.__annotations__["val"] = typing.Optional[t] + + rt = xtr.get_return_type() + if rt: + optional_converter.__annotations__["return"] = typing.Optional[rt] + + return optional_converter + + +def default_if_none(default=NOTHING, factory=None): + """ + A converter that allows to replace ``None`` values by *default* or the + result of *factory*. + + :param default: Value to be used if ``None`` is passed. Passing an instance + of `attrs.Factory` is supported, however the ``takes_self`` option + is *not*. + :param callable factory: A callable that takes no parameters whose result + is used if ``None`` is passed. + + :raises TypeError: If **neither** *default* or *factory* is passed. + :raises TypeError: If **both** *default* and *factory* are passed. + :raises ValueError: If an instance of `attrs.Factory` is passed with + ``takes_self=True``. + + .. versionadded:: 18.2.0 + """ + if default is NOTHING and factory is None: + raise TypeError("Must pass either `default` or `factory`.") + + if default is not NOTHING and factory is not None: + raise TypeError( + "Must pass either `default` or `factory` but not both." 
def to_bool(val):
    """
    Convert "boolean" strings (e.g., from env. vars.) to real booleans.

    Values mapping to :code:`True`:

    - :code:`True`
    - :code:`"true"` / :code:`"t"`
    - :code:`"yes"` / :code:`"y"`
    - :code:`"on"`
    - :code:`"1"`
    - :code:`1`

    Values mapping to :code:`False`:

    - :code:`False`
    - :code:`"false"` / :code:`"f"`
    - :code:`"no"` / :code:`"n"`
    - :code:`"off"`
    - :code:`"0"`
    - :code:`0`

    :raises ValueError: for any other value.

    .. versionadded:: 21.3.0
    """
    if isinstance(val, str):
        val = val.lower()

    true_values = {True, 1, "true", "t", "yes", "y", "on", "1"}
    false_values = {False, 0, "false", "f", "no", "n", "off", "0"}

    try:
        if val in true_values:
            return True
        if val in false_values:
            return False
    except TypeError:
        # Unhashable values (e.g., lists) cannot be looked up in a set;
        # they fall through to the ValueError below.
        pass

    raise ValueError(f"Cannot convert value to bool: {val}")
class FrozenError(AttributeError):
    """
    A frozen/immutable instance or attribute has been attempted to be
    modified.

    It mirrors the behavior of ``namedtuples`` by using the same error
    message and subclassing `AttributeError`.

    .. versionadded:: 20.1.0
    """

    msg = "can't set attribute"
    # Same (msg,) shape that namedtuples use for their immutability error.
    args = [msg]


class FrozenInstanceError(FrozenError):
    """
    A frozen instance has been attempted to be modified.

    .. versionadded:: 16.1.0
    """


class FrozenAttributeError(FrozenError):
    """
    A frozen attribute has been attempted to be modified.

    .. versionadded:: 20.1.0
    """


class AttrsAttributeNotFoundError(ValueError):
    """
    An ``attrs`` function couldn't find an attribute that the user asked
    for.

    .. versionadded:: 16.2.0
    """


class NotAnAttrsClassError(ValueError):
    """
    A non-``attrs`` class has been passed into an ``attrs`` function.

    .. versionadded:: 16.2.0
    """


class DefaultAlreadySetError(RuntimeError):
    """
    A default has been set using ``attr.ib()`` and is attempted to be reset
    using the decorator.

    .. versionadded:: 17.1.0
    """


class UnannotatedAttributeError(RuntimeError):
    """
    A class with ``auto_attribs=True`` has an ``attr.ib()`` without a type
    annotation.

    .. versionadded:: 17.3.0
    """


class PythonTooOldError(RuntimeError):
    """
    It was attempted to use an ``attrs`` feature that requires a newer
    Python version.

    .. versionadded:: 18.2.0
    """


class NotCallableError(TypeError):
    """
    An ``attr.ib()`` requiring a callable has been set with a value that is
    not callable.

    .. versionadded:: 19.2.0
    """

    def __init__(self, msg, value):
        # super(TypeError, ...) deliberately skips TypeError in the MRO so
        # Exception.__init__ stores both arguments in ``args``.
        super(TypeError, self).__init__(msg, value)
        self.msg = msg
        self.value = value

    def __str__(self):
        return str(self.msg)
+ :type what: `list` of classes or `attrs.Attribute`\\ s. + + :rtype: `callable` + """ + cls, attrs = _split_what(what) + + def exclude_(attribute, value): + return value.__class__ not in cls and attribute not in attrs + + return exclude_ diff --git a/pythonFiles/lib/jedilsp/attr/filters.pyi b/pythonFiles/lib/jedilsp/attr/filters.pyi new file mode 100644 index 0000000000000..993866865eab7 --- /dev/null +++ b/pythonFiles/lib/jedilsp/attr/filters.pyi @@ -0,0 +1,6 @@ +from typing import Any, Union + +from . import Attribute, _FilterType + +def include(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... +def exclude(*what: Union[type, Attribute[Any]]) -> _FilterType[Any]: ... diff --git a/pythonFiles/lib/jedilsp/attr/py.typed b/pythonFiles/lib/jedilsp/attr/py.typed new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/pythonFiles/lib/jedilsp/attr/setters.py b/pythonFiles/lib/jedilsp/attr/setters.py new file mode 100644 index 0000000000000..12ed6750df35b --- /dev/null +++ b/pythonFiles/lib/jedilsp/attr/setters.py @@ -0,0 +1,73 @@ +# SPDX-License-Identifier: MIT + +""" +Commonly used hooks for on_setattr. +""" + + +from . import _config +from .exceptions import FrozenAttributeError + + +def pipe(*setters): + """ + Run all *setters* and return the return value of the last one. + + .. versionadded:: 20.1.0 + """ + + def wrapped_pipe(instance, attrib, new_value): + rv = new_value + + for setter in setters: + rv = setter(instance, attrib, rv) + + return rv + + return wrapped_pipe + + +def frozen(_, __, ___): + """ + Prevent an attribute to be modified. + + .. versionadded:: 20.1.0 + """ + raise FrozenAttributeError() + + +def validate(instance, attrib, new_value): + """ + Run *attrib*'s validator on *new_value* if it has one. + + .. 
def convert(instance, attrib, new_value):
    """
    Run *attrib*'s converter -- if it has one -- on *new_value* and return
    the result.

    .. versionadded:: 20.1.0
    """
    converter = attrib.converter
    # No converter configured means the value is stored as-is.
    return converter(new_value) if converter else new_value
+""" + + +import operator +import re + +from contextlib import contextmanager + +from ._config import get_run_validators, set_run_validators +from ._make import _AndValidator, and_, attrib, attrs +from .converters import default_if_none +from .exceptions import NotCallableError + + +try: + Pattern = re.Pattern +except AttributeError: # Python <3.7 lacks a Pattern type. + Pattern = type(re.compile("")) + + +__all__ = [ + "and_", + "deep_iterable", + "deep_mapping", + "disabled", + "ge", + "get_disabled", + "gt", + "in_", + "instance_of", + "is_callable", + "le", + "lt", + "matches_re", + "max_len", + "min_len", + "not_", + "optional", + "provides", + "set_disabled", +] + + +def set_disabled(disabled): + """ + Globally disable or enable running validators. + + By default, they are run. + + :param disabled: If ``True``, disable running all validators. + :type disabled: bool + + .. warning:: + + This function is not thread-safe! + + .. versionadded:: 21.3.0 + """ + set_run_validators(not disabled) + + +def get_disabled(): + """ + Return a bool indicating whether validators are currently disabled or not. + + :return: ``True`` if validators are currently disabled. + :rtype: bool + + .. versionadded:: 21.3.0 + """ + return not get_run_validators() + + +@contextmanager +def disabled(): + """ + Context manager that disables running validators within its context. + + .. warning:: + + This context manager is not thread-safe! + + .. versionadded:: 21.3.0 + """ + set_run_validators(False) + try: + yield + finally: + set_run_validators(True) + + +@attrs(repr=False, slots=True, hash=True) +class _InstanceOfValidator: + type = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. 
+ """ + if not isinstance(value, self.type): + raise TypeError( + "'{name}' must be {type!r} (got {value!r} that is a " + "{actual!r}).".format( + name=attr.name, + type=self.type, + actual=value.__class__, + value=value, + ), + attr, + self.type, + value, + ) + + def __repr__(self): + return "".format( + type=self.type + ) + + +def instance_of(type): + """ + A validator that raises a `TypeError` if the initializer is called + with a wrong type for this particular attribute (checks are performed using + `isinstance` therefore it's also valid to pass a tuple of types). + + :param type: The type to check for. + :type type: type or tuple of type + + :raises TypeError: With a human readable error message, the attribute + (of type `attrs.Attribute`), the expected type, and the value it + got. + """ + return _InstanceOfValidator(type) + + +@attrs(repr=False, frozen=True, slots=True) +class _MatchesReValidator: + pattern = attrib() + match_func = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not self.match_func(value): + raise ValueError( + "'{name}' must match regex {pattern!r}" + " ({value!r} doesn't)".format( + name=attr.name, pattern=self.pattern.pattern, value=value + ), + attr, + self.pattern, + value, + ) + + def __repr__(self): + return "".format( + pattern=self.pattern + ) + + +def matches_re(regex, flags=0, func=None): + r""" + A validator that raises `ValueError` if the initializer is called + with a string that doesn't match *regex*. + + :param regex: a regex string or precompiled pattern to match against + :param int flags: flags that will be passed to the underlying re function + (default 0) + :param callable func: which underlying `re` function to call. Valid options + are `re.fullmatch`, `re.search`, and `re.match`; the default ``None`` + means `re.fullmatch`. For performance reasons, the pattern is always + precompiled using `re.compile`. + + .. 
@attrs(repr=False, slots=True, hash=True)
class _ProvidesValidator:
    # A zope.interface Interface; membership is tested via providedBy().
    interface = attrib()

    def __call__(self, inst, attr, value):
        """
        We use a callable class to be able to change the ``__repr__``.
        """
        if not self.interface.providedBy(value):
            raise TypeError(
                "'{name}' must provide {interface!r} which {value!r} "
                "doesn't.".format(
                    name=attr.name, interface=self.interface, value=value
                ),
                attr,
                self.interface,
                value,
            )

    def __repr__(self):
        # Fix: the original returned "".format(interface=...), which always
        # yields an empty string (the descriptive literal was lost).
        # Restored per the upstream attrs implementation.
        return "<provides validator for interface {interface!r}>".format(
            interface=self.interface
        )
+ """ + return _ProvidesValidator(interface) + + +@attrs(repr=False, slots=True, hash=True) +class _OptionalValidator: + validator = attrib() + + def __call__(self, inst, attr, value): + if value is None: + return + + self.validator(inst, attr, value) + + def __repr__(self): + return "".format( + what=repr(self.validator) + ) + + +def optional(validator): + """ + A validator that makes an attribute optional. An optional attribute is one + which can be set to ``None`` in addition to satisfying the requirements of + the sub-validator. + + :param validator: A validator (or a list of validators) that is used for + non-``None`` values. + :type validator: callable or `list` of callables. + + .. versionadded:: 15.1.0 + .. versionchanged:: 17.1.0 *validator* can be a list of validators. + """ + if isinstance(validator, list): + return _OptionalValidator(_AndValidator(validator)) + return _OptionalValidator(validator) + + +@attrs(repr=False, slots=True, hash=True) +class _InValidator: + options = attrib() + + def __call__(self, inst, attr, value): + try: + in_options = value in self.options + except TypeError: # e.g. `1 in "abc"` + in_options = False + + if not in_options: + raise ValueError( + "'{name}' must be in {options!r} (got {value!r})".format( + name=attr.name, options=self.options, value=value + ), + attr, + self.options, + value, + ) + + def __repr__(self): + return "".format( + options=self.options + ) + + +def in_(options): + """ + A validator that raises a `ValueError` if the initializer is called + with a value that does not belong in the options provided. The check is + performed using ``value in options``. + + :param options: Allowed options. + :type options: list, tuple, `enum.Enum`, ... + + :raises ValueError: With a human readable error message, the attribute (of + type `attrs.Attribute`), the expected options, and the value it + got. + + .. versionadded:: 17.1.0 + .. 
versionchanged:: 22.1.0 + The ValueError was incomplete until now and only contained the human + readable error message. Now it contains all the information that has + been promised since 17.1.0. + """ + return _InValidator(options) + + +@attrs(repr=False, slots=False, hash=True) +class _IsCallableValidator: + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not callable(value): + message = ( + "'{name}' must be callable " + "(got {value!r} that is a {actual!r})." + ) + raise NotCallableError( + msg=message.format( + name=attr.name, value=value, actual=value.__class__ + ), + value=value, + ) + + def __repr__(self): + return "" + + +def is_callable(): + """ + A validator that raises a `attr.exceptions.NotCallableError` if the + initializer is called with a value for this particular attribute + that is not callable. + + .. versionadded:: 19.1.0 + + :raises `attr.exceptions.NotCallableError`: With a human readable error + message containing the attribute (`attrs.Attribute`) name, + and the value it got. + """ + return _IsCallableValidator() + + +@attrs(repr=False, slots=True, hash=True) +class _DeepIterable: + member_validator = attrib(validator=is_callable()) + iterable_validator = attrib( + default=None, validator=optional(is_callable()) + ) + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if self.iterable_validator is not None: + self.iterable_validator(inst, attr, value) + + for member in value: + self.member_validator(inst, attr, member) + + def __repr__(self): + iterable_identifier = ( + "" + if self.iterable_validator is None + else f" {self.iterable_validator!r}" + ) + return ( + "" + ).format( + iterable_identifier=iterable_identifier, + member=self.member_validator, + ) + + +def deep_iterable(member_validator, iterable_validator=None): + """ + A validator that performs deep validation of an iterable. 
+ + :param member_validator: Validator(s) to apply to iterable members + :param iterable_validator: Validator to apply to iterable itself + (optional) + + .. versionadded:: 19.1.0 + + :raises TypeError: if any sub-validators fail + """ + if isinstance(member_validator, (list, tuple)): + member_validator = and_(*member_validator) + return _DeepIterable(member_validator, iterable_validator) + + +@attrs(repr=False, slots=True, hash=True) +class _DeepMapping: + key_validator = attrib(validator=is_callable()) + value_validator = attrib(validator=is_callable()) + mapping_validator = attrib(default=None, validator=optional(is_callable())) + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if self.mapping_validator is not None: + self.mapping_validator(inst, attr, value) + + for key in value: + self.key_validator(inst, attr, key) + self.value_validator(inst, attr, value[key]) + + def __repr__(self): + return ( + "" + ).format(key=self.key_validator, value=self.value_validator) + + +def deep_mapping(key_validator, value_validator, mapping_validator=None): + """ + A validator that performs deep validation of a dictionary. + + :param key_validator: Validator to apply to dictionary keys + :param value_validator: Validator to apply to dictionary values + :param mapping_validator: Validator to apply to top-level mapping + attribute (optional) + + .. versionadded:: 19.1.0 + + :raises TypeError: if any sub-validators fail + """ + return _DeepMapping(key_validator, value_validator, mapping_validator) + + +@attrs(repr=False, frozen=True, slots=True) +class _NumberValidator: + bound = attrib() + compare_op = attrib() + compare_func = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. 
+ """ + if not self.compare_func(value, self.bound): + raise ValueError( + "'{name}' must be {op} {bound}: {value}".format( + name=attr.name, + op=self.compare_op, + bound=self.bound, + value=value, + ) + ) + + def __repr__(self): + return "".format( + op=self.compare_op, bound=self.bound + ) + + +def lt(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number larger or equal to *val*. + + :param val: Exclusive upper bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, "<", operator.lt) + + +def le(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number greater than *val*. + + :param val: Inclusive upper bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, "<=", operator.le) + + +def ge(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number smaller than *val*. + + :param val: Inclusive lower bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, ">=", operator.ge) + + +def gt(val): + """ + A validator that raises `ValueError` if the initializer is called + with a number smaller or equal to *val*. + + :param val: Exclusive lower bound for values + + .. versionadded:: 21.3.0 + """ + return _NumberValidator(val, ">", operator.gt) + + +@attrs(repr=False, frozen=True, slots=True) +class _MaxLengthValidator: + max_length = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if len(value) > self.max_length: + raise ValueError( + "Length of '{name}' must be <= {max}: {len}".format( + name=attr.name, max=self.max_length, len=len(value) + ) + ) + + def __repr__(self): + return f"" + + +def max_len(length): + """ + A validator that raises `ValueError` if the initializer is called + with a string or iterable that is longer than *length*. 
+ + :param int length: Maximum length of the string or iterable + + .. versionadded:: 21.3.0 + """ + return _MaxLengthValidator(length) + + +@attrs(repr=False, frozen=True, slots=True) +class _MinLengthValidator: + min_length = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if len(value) < self.min_length: + raise ValueError( + "Length of '{name}' must be => {min}: {len}".format( + name=attr.name, min=self.min_length, len=len(value) + ) + ) + + def __repr__(self): + return f"" + + +def min_len(length): + """ + A validator that raises `ValueError` if the initializer is called + with a string or iterable that is shorter than *length*. + + :param int length: Minimum length of the string or iterable + + .. versionadded:: 22.1.0 + """ + return _MinLengthValidator(length) + + +@attrs(repr=False, slots=True, hash=True) +class _SubclassOfValidator: + type = attrib() + + def __call__(self, inst, attr, value): + """ + We use a callable class to be able to change the ``__repr__``. + """ + if not issubclass(value, self.type): + raise TypeError( + "'{name}' must be a subclass of {type!r} " + "(got {value!r}).".format( + name=attr.name, + type=self.type, + value=value, + ), + attr, + self.type, + value, + ) + + def __repr__(self): + return "".format( + type=self.type + ) + + +def _subclass_of(type): + """ + A validator that raises a `TypeError` if the initializer is called + with a wrong type for this particular attribute (checks are performed using + `issubclass` therefore it's also valid to pass a tuple of types). + + :param type: The type to check for. + :type type: type or tuple of types + + :raises TypeError: With a human readable error message, the attribute + (of type `attrs.Attribute`), the expected type, and the value it + got. 
+ """ + return _SubclassOfValidator(type) + + +@attrs(repr=False, slots=True, hash=True) +class _NotValidator: + validator = attrib() + msg = attrib( + converter=default_if_none( + "not_ validator child '{validator!r}' " + "did not raise a captured error" + ) + ) + exc_types = attrib( + validator=deep_iterable( + member_validator=_subclass_of(Exception), + iterable_validator=instance_of(tuple), + ), + ) + + def __call__(self, inst, attr, value): + try: + self.validator(inst, attr, value) + except self.exc_types: + pass # suppress error to invert validity + else: + raise ValueError( + self.msg.format( + validator=self.validator, + exc_types=self.exc_types, + ), + attr, + self.validator, + value, + self.exc_types, + ) + + def __repr__(self): + return ( + "" + ).format( + what=self.validator, + exc_types=self.exc_types, + ) + + +def not_(validator, *, msg=None, exc_types=(ValueError, TypeError)): + """ + A validator that wraps and logically 'inverts' the validator passed to it. + It will raise a `ValueError` if the provided validator *doesn't* raise a + `ValueError` or `TypeError` (by default), and will suppress the exception + if the provided validator *does*. + + Intended to be used with existing validators to compose logic without + needing to create inverted variants, for example, ``not_(in_(...))``. + + :param validator: A validator to be logically inverted. + :param msg: Message to raise if validator fails. + Formatted with keys ``exc_types`` and ``validator``. + :type msg: str + :param exc_types: Exception type(s) to capture. + Other types raised by child validators will not be intercepted and + pass through. + + :raises ValueError: With a human readable error message, + the attribute (of type `attrs.Attribute`), + the validator that failed to raise an exception, + the value it got, + and the expected exception types. + + .. 
versionadded:: 22.2.0 + """ + try: + exc_types = tuple(exc_types) + except TypeError: + exc_types = (exc_types,) + return _NotValidator(validator, msg, exc_types) diff --git a/pythonFiles/lib/jedilsp/attr/validators.pyi b/pythonFiles/lib/jedilsp/attr/validators.pyi new file mode 100644 index 0000000000000..fd9206de9bed3 --- /dev/null +++ b/pythonFiles/lib/jedilsp/attr/validators.pyi @@ -0,0 +1,86 @@ +from typing import ( + Any, + AnyStr, + Callable, + Container, + ContextManager, + Iterable, + List, + Mapping, + Match, + Optional, + Pattern, + Tuple, + Type, + TypeVar, + Union, + overload, +) + +from . import _ValidatorType +from . import _ValidatorArgType + +_T = TypeVar("_T") +_T1 = TypeVar("_T1") +_T2 = TypeVar("_T2") +_T3 = TypeVar("_T3") +_I = TypeVar("_I", bound=Iterable) +_K = TypeVar("_K") +_V = TypeVar("_V") +_M = TypeVar("_M", bound=Mapping) + +def set_disabled(run: bool) -> None: ... +def get_disabled() -> bool: ... +def disabled() -> ContextManager[None]: ... + +# To be more precise on instance_of use some overloads. +# If there are more than 3 items in the tuple then we fall back to Any +@overload +def instance_of(type: Type[_T]) -> _ValidatorType[_T]: ... +@overload +def instance_of(type: Tuple[Type[_T]]) -> _ValidatorType[_T]: ... +@overload +def instance_of( + type: Tuple[Type[_T1], Type[_T2]] +) -> _ValidatorType[Union[_T1, _T2]]: ... +@overload +def instance_of( + type: Tuple[Type[_T1], Type[_T2], Type[_T3]] +) -> _ValidatorType[Union[_T1, _T2, _T3]]: ... +@overload +def instance_of(type: Tuple[type, ...]) -> _ValidatorType[Any]: ... +def provides(interface: Any) -> _ValidatorType[Any]: ... +def optional( + validator: Union[_ValidatorType[_T], List[_ValidatorType[_T]]] +) -> _ValidatorType[Optional[_T]]: ... +def in_(options: Container[_T]) -> _ValidatorType[_T]: ... +def and_(*validators: _ValidatorType[_T]) -> _ValidatorType[_T]: ... 
+def matches_re( + regex: Union[Pattern[AnyStr], AnyStr], + flags: int = ..., + func: Optional[ + Callable[[AnyStr, AnyStr, int], Optional[Match[AnyStr]]] + ] = ..., +) -> _ValidatorType[AnyStr]: ... +def deep_iterable( + member_validator: _ValidatorArgType[_T], + iterable_validator: Optional[_ValidatorType[_I]] = ..., +) -> _ValidatorType[_I]: ... +def deep_mapping( + key_validator: _ValidatorType[_K], + value_validator: _ValidatorType[_V], + mapping_validator: Optional[_ValidatorType[_M]] = ..., +) -> _ValidatorType[_M]: ... +def is_callable() -> _ValidatorType[_T]: ... +def lt(val: _T) -> _ValidatorType[_T]: ... +def le(val: _T) -> _ValidatorType[_T]: ... +def ge(val: _T) -> _ValidatorType[_T]: ... +def gt(val: _T) -> _ValidatorType[_T]: ... +def max_len(length: int) -> _ValidatorType[_T]: ... +def min_len(length: int) -> _ValidatorType[_T]: ... +def not_( + validator: _ValidatorType[_T], + *, + msg: Optional[str] = None, + exc_types: Union[Type[Exception], Iterable[Type[Exception]]] = ... +) -> _ValidatorType[_T]: ... 
diff --git a/pythonFiles/lib/jedilsp/attrs-22.2.0.dist-info/INSTALLER b/pythonFiles/lib/jedilsp/attrs-22.2.0.dist-info/INSTALLER new file mode 100644 index 0000000000000..a1b589e38a320 --- /dev/null +++ b/pythonFiles/lib/jedilsp/attrs-22.2.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/pythonFiles/lib/jedilsp/attrs-22.2.0.dist-info/LICENSE b/pythonFiles/lib/jedilsp/attrs-22.2.0.dist-info/LICENSE new file mode 100644 index 0000000000000..2bd6453d255e1 --- /dev/null +++ b/pythonFiles/lib/jedilsp/attrs-22.2.0.dist-info/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2015 Hynek Schlawack and the attrs contributors + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/pythonFiles/lib/jedilsp/attrs-22.2.0.dist-info/METADATA b/pythonFiles/lib/jedilsp/attrs-22.2.0.dist-info/METADATA new file mode 100644 index 0000000000000..0f71b57a0cfa9 --- /dev/null +++ b/pythonFiles/lib/jedilsp/attrs-22.2.0.dist-info/METADATA @@ -0,0 +1,278 @@ +Metadata-Version: 2.1 +Name: attrs +Version: 22.2.0 +Summary: Classes Without Boilerplate +Home-page: https://www.attrs.org/ +Author: Hynek Schlawack +Author-email: hs@ox.cx +Maintainer: Hynek Schlawack +Maintainer-email: hs@ox.cx +License: MIT +Project-URL: Documentation, https://www.attrs.org/ +Project-URL: Changelog, https://www.attrs.org/en/stable/changelog.html +Project-URL: Bug Tracker, https://github.com/python-attrs/attrs/issues +Project-URL: Source Code, https://github.com/python-attrs/attrs +Project-URL: Funding, https://github.com/sponsors/hynek +Project-URL: Tidelift, https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=pypi +Project-URL: Ko-fi, https://ko-fi.com/the_hynek +Keywords: class,attribute,boilerplate,dataclass +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: Natural Language :: English +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.6 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Classifier: Programming Language :: Python :: 3.11 +Classifier: Programming Language :: Python :: Implementation :: CPython +Classifier: Programming Language :: Python :: Implementation :: PyPy +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Requires-Python: >=3.6 +Description-Content-Type: text/markdown +License-File: LICENSE 
+Provides-Extra: cov +Requires-Dist: attrs[tests] ; extra == 'cov' +Requires-Dist: coverage-enable-subprocess ; extra == 'cov' +Requires-Dist: coverage[toml] (>=5.3) ; extra == 'cov' +Provides-Extra: dev +Requires-Dist: attrs[docs,tests] ; extra == 'dev' +Provides-Extra: docs +Requires-Dist: furo ; extra == 'docs' +Requires-Dist: sphinx ; extra == 'docs' +Requires-Dist: myst-parser ; extra == 'docs' +Requires-Dist: zope.interface ; extra == 'docs' +Requires-Dist: sphinx-notfound-page ; extra == 'docs' +Requires-Dist: sphinxcontrib-towncrier ; extra == 'docs' +Requires-Dist: towncrier ; extra == 'docs' +Provides-Extra: tests +Requires-Dist: attrs[tests-no-zope] ; extra == 'tests' +Requires-Dist: zope.interface ; extra == 'tests' +Provides-Extra: tests-no-zope +Requires-Dist: hypothesis ; extra == 'tests-no-zope' +Requires-Dist: pympler ; extra == 'tests-no-zope' +Requires-Dist: pytest (>=4.3.0) ; extra == 'tests-no-zope' +Requires-Dist: pytest-xdist[psutil] ; extra == 'tests-no-zope' +Requires-Dist: cloudpickle ; (platform_python_implementation == "CPython") and extra == 'tests-no-zope' +Requires-Dist: mypy (<0.990,>=0.971) ; (platform_python_implementation == "CPython") and extra == 'tests-no-zope' +Requires-Dist: pytest-mypy-plugins ; (platform_python_implementation == "CPython" and python_version < "3.11") and extra == 'tests-no-zope' +Provides-Extra: tests_no_zope +Requires-Dist: hypothesis ; extra == 'tests_no_zope' +Requires-Dist: pympler ; extra == 'tests_no_zope' +Requires-Dist: pytest (>=4.3.0) ; extra == 'tests_no_zope' +Requires-Dist: pytest-xdist[psutil] ; extra == 'tests_no_zope' +Requires-Dist: cloudpickle ; (platform_python_implementation == "CPython") and extra == 'tests_no_zope' +Requires-Dist: mypy (<0.990,>=0.971) ; (platform_python_implementation == "CPython") and extra == 'tests_no_zope' +Requires-Dist: pytest-mypy-plugins ; (platform_python_implementation == "CPython" and python_version < "3.11") and extra == 'tests_no_zope' + +

+ + attrs + +

+ + +

+ + Documentation + + + License: MIT + + + + + + + Downloads per month + + DOI +

+ + + +*attrs* is the Python package that will bring back the **joy** of **writing classes** by relieving you from the drudgery of implementing object protocols (aka [dunder methods](https://www.attrs.org/en/latest/glossary.html#term-dunder-methods)). +[Trusted by NASA](https://docs.github.com/en/account-and-profile/setting-up-and-managing-your-github-profile/customizing-your-profile/personalizing-your-profile#list-of-qualifying-repositories-for-mars-2020-helicopter-contributor-achievement) for Mars missions since 2020! + +Its main goal is to help you to write **concise** and **correct** software without slowing down your code. + + +## Sponsors + +*attrs* would not be possible without our [amazing sponsors](https://github.com/sponsors/hynek). +Especially those generously supporting us at the *The Organization* tier and higher: + +

+ + + + + + + + + + + + + + + +

+ +

+ Please consider joining them to help make attrs’s maintenance more sustainable! +

+ + + +## Example + +*attrs* gives you a class decorator and a way to declaratively define the attributes on that class: + + + +```pycon +>>> from attrs import asdict, define, make_class, Factory + +>>> @define +... class SomeClass: +... a_number: int = 42 +... list_of_numbers: list[int] = Factory(list) +... +... def hard_math(self, another_number): +... return self.a_number + sum(self.list_of_numbers) * another_number + + +>>> sc = SomeClass(1, [1, 2, 3]) +>>> sc +SomeClass(a_number=1, list_of_numbers=[1, 2, 3]) + +>>> sc.hard_math(3) +19 +>>> sc == SomeClass(1, [1, 2, 3]) +True +>>> sc != SomeClass(2, [3, 2, 1]) +True + +>>> asdict(sc) +{'a_number': 1, 'list_of_numbers': [1, 2, 3]} + +>>> SomeClass() +SomeClass(a_number=42, list_of_numbers=[]) + +>>> C = make_class("C", ["a", "b"]) +>>> C("foo", "bar") +C(a='foo', b='bar') +``` + +After *declaring* your attributes, *attrs* gives you: + +- a concise and explicit overview of the class's attributes, +- a nice human-readable `__repr__`, +- equality-checking methods, +- an initializer, +- and much more, + +*without* writing dull boilerplate code again and again and *without* runtime performance penalties. + +**Hate type annotations**!? +No problem! +Types are entirely **optional** with *attrs*. +Simply assign `attrs.field()` to the attributes instead of annotating them with types. + +--- + +This example uses *attrs*'s modern APIs that have been introduced in version 20.1.0, and the *attrs* package import name that has been added in version 21.3.0. +The classic APIs (`@attr.s`, `attr.ib`, plus their serious-business aliases) and the `attr` package import name will remain **indefinitely**. + +Please check out [*On The Core API Names*](https://www.attrs.org/en/latest/names.html) for a more in-depth explanation. + + +## Data Classes + +On the tin, *attrs* might remind you of `dataclasses` (and indeed, `dataclasses` [are a descendant](https://hynek.me/articles/import-attrs/) of *attrs*). 
+In practice it does a lot more and is more flexible. +For instance it allows you to define [special handling of NumPy arrays for equality checks](https://www.attrs.org/en/stable/comparison.html#customization), or allows more ways to [plug into the initialization process](https://www.attrs.org/en/stable/init.html#hooking-yourself-into-initialization). + +For more details, please refer to our [comparison page](https://www.attrs.org/en/stable/why.html#data-classes). + + +## Project Information + +- [**Changelog**](https://www.attrs.org/en/stable/changelog.html) +- [**Documentation**](https://www.attrs.org/) +- [**PyPI**](https://pypi.org/project/attrs/) +- [**Source Code**](https://github.com/python-attrs/attrs) +- [**Contributing**](https://github.com/python-attrs/attrs/blob/main/.github/CONTRIBUTING.md) +- [**Third-party Extensions**](https://github.com/python-attrs/attrs/wiki/Extensions-to-attrs) +- **License**: [MIT](https://www.attrs.org/en/latest/license.html) +- **Get Help**: please use the `python-attrs` tag on [StackOverflow](https://stackoverflow.com/questions/tagged/python-attrs) +- **Supported Python Versions**: 3.6 and later + + +### *attrs* for Enterprise + +Available as part of the Tidelift Subscription. + +The maintainers of *attrs* and thousands of other packages are working with Tidelift to deliver commercial support and maintenance for the open source packages you use to build your applications. +Save time, reduce risk, and improve code health, while paying the maintainers of the exact packages you use. +[Learn more.](https://tidelift.com/subscription/pkg/pypi-attrs?utm_source=pypi-attrs&utm_medium=referral&utm_campaign=enterprise&utm_term=repo) + + +## Changes in This Release + +### Backwards-incompatible Changes + +- Python 3.5 is not supported anymore. + [#988](https://github.com/python-attrs/attrs/issues/988) + + +### Deprecations + +- Python 3.6 is now deprecated and support will be removed in the next release. 
+ [#1017](https://github.com/python-attrs/attrs/issues/1017) + + +### Changes + +- `attrs.field()` now supports an *alias* option for explicit `__init__` argument names. + + Get `__init__` signatures matching any taste, peculiar or plain! + The [PEP 681 compatible](https://peps.python.org/pep-0681/#field-specifier-parameters) *alias* option can be use to override private attribute name mangling, or add other arbitrary field argument name overrides. + [#950](https://github.com/python-attrs/attrs/issues/950) +- `attrs.NOTHING` is now an enum value, making it possible to use with e.g. [`typing.Literal`](https://docs.python.org/3/library/typing.html#typing.Literal). + [#983](https://github.com/python-attrs/attrs/issues/983) +- Added missing re-import of `attr.AttrsInstance` to the `attrs` namespace. + [#987](https://github.com/python-attrs/attrs/issues/987) +- Fix slight performance regression in classes with custom `__setattr__` and speedup even more. + [#991](https://github.com/python-attrs/attrs/issues/991) +- Class-creation performance improvements by switching performance-sensitive templating operations to f-strings. + + You can expect an improvement of about 5% -- even for very simple classes. + [#995](https://github.com/python-attrs/attrs/issues/995) +- `attrs.has()` is now a [`TypeGuard`](https://docs.python.org/3/library/typing.html#typing.TypeGuard) for `AttrsInstance`. + That means that type checkers know a class is an instance of an `attrs` class if you check it using `attrs.has()` (or `attr.has()`) first. + [#997](https://github.com/python-attrs/attrs/issues/997) +- Made `attrs.AttrsInstance` stub available at runtime and fixed type errors related to the usage of `attrs.AttrsInstance` in *Pyright*. + [#999](https://github.com/python-attrs/attrs/issues/999) +- On Python 3.10 and later, call [`abc.update_abstractmethods()`](https://docs.python.org/3/library/abc.html#abc.update_abstractmethods) on dict classes after creation. 
+ This improves the detection of abstractness. + [#1001](https://github.com/python-attrs/attrs/issues/1001) +- *attrs*'s pickling methods now use dicts instead of tuples. + That is safer and more robust across different versions of a class. + [#1009](https://github.com/python-attrs/attrs/issues/1009) +- Added `attrs.validators.not_(wrapped_validator)` to logically invert *wrapped_validator* by accepting only values where *wrapped_validator* rejects the value with a `ValueError` or `TypeError` (by default, exception types configurable). + [#1010](https://github.com/python-attrs/attrs/issues/1010) +- The type stubs for `attrs.cmp_using()` now have default values. + [#1027](https://github.com/python-attrs/attrs/issues/1027) +- To conform with [PEP 681](https://peps.python.org/pep-0681/), `attr.s()` and `attrs.define()` now accept *unsafe_hash* in addition to *hash*. + [#1065](https://github.com/python-attrs/attrs/issues/1065) + +--- + +[Full changelog](https://www.attrs.org/en/stable/changelog.html) diff --git a/pythonFiles/lib/jedilsp/attrs-22.2.0.dist-info/RECORD b/pythonFiles/lib/jedilsp/attrs-22.2.0.dist-info/RECORD new file mode 100644 index 0000000000000..9e7b5e48494aa --- /dev/null +++ b/pythonFiles/lib/jedilsp/attrs-22.2.0.dist-info/RECORD @@ -0,0 +1,57 @@ +attr/__init__.py,sha256=-lJ5CXKE5yKk97Z2HSMRJFiGz1TdXLU9q4Ysb2Id4IQ,1947 +attr/__init__.pyi,sha256=qOjUNync7Lq8NLk30l_DRTh1h62mMl1e4VnqBgY2x24,15831 +attr/__pycache__/__init__.cpython-39.pyc,, +attr/__pycache__/_cmp.cpython-39.pyc,, +attr/__pycache__/_compat.cpython-39.pyc,, +attr/__pycache__/_config.cpython-39.pyc,, +attr/__pycache__/_funcs.cpython-39.pyc,, +attr/__pycache__/_make.cpython-39.pyc,, +attr/__pycache__/_next_gen.cpython-39.pyc,, +attr/__pycache__/_version_info.cpython-39.pyc,, +attr/__pycache__/converters.cpython-39.pyc,, +attr/__pycache__/exceptions.cpython-39.pyc,, +attr/__pycache__/filters.cpython-39.pyc,, +attr/__pycache__/setters.cpython-39.pyc,, 
+attr/__pycache__/validators.cpython-39.pyc,, +attr/_cmp.py,sha256=mwr1ImJlkFL9Zi0E55-90IfchMKr94ko6e-p4y__M_4,4094 +attr/_cmp.pyi,sha256=sGQmOM0w3_K4-X8cTXR7g0Hqr290E8PTObA9JQxWQqc,399 +attr/_compat.py,sha256=Da-SeMicy7SkTKCCwKtfX41sUMf0o54tK96zsu1qE60,5435 +attr/_config.py,sha256=5W8lgRePuIOWu1ZuqF1899e2CmXGc95-ipwTpF1cEU4,826 +attr/_funcs.py,sha256=0EqqZgKNZBk4PXQvCF_fuWWAz14mSdZpk4UBZpX_fDQ,14545 +attr/_make.py,sha256=MdYHoWXJ2WlQNZPMTX4gkBO06QgPyb3qwSWSxaJ6QVg,96087 +attr/_next_gen.py,sha256=95DRKAfIuHbcwO9W_yWtRsHt3IbfxbAgpyB6agxbghw,6059 +attr/_typing_compat.pyi,sha256=XDP54TUn-ZKhD62TOQebmzrwFyomhUCoGRpclb6alRA,469 +attr/_version_info.py,sha256=exSqb3b5E-fMSsgZAlEw9XcLpEgobPORCZpcaEglAM4,2121 +attr/_version_info.pyi,sha256=x_M3L3WuB7r_ULXAWjx959udKQ4HLB8l-hsc1FDGNvk,209 +attr/converters.py,sha256=xfGVSPRgWGcym6N5FZM9fyfvCQePqFyApWeC5BXKvoM,3602 +attr/converters.pyi,sha256=jKlpHBEt6HVKJvgrMFJRrHq8p61GXg4-Nd5RZWKJX7M,406 +attr/exceptions.py,sha256=ZGEMLv0CDY1TOtj49OF84myejOn-LCCXAKGIKalKkVU,1915 +attr/exceptions.pyi,sha256=zZq8bCUnKAy9mDtBEw42ZhPhAUIHoTKedDQInJD883M,539 +attr/filters.py,sha256=aZep54h8-4ZYV5lmZ3Dx2mqeQH4cMx6tuCmCylLNbEU,1038 +attr/filters.pyi,sha256=_Sm80jGySETX_Clzdkon5NHVjQWRl3Y3liQKZX1czXc,215 +attr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attr/setters.py,sha256=pbCZQ-pE6ZxjDqZfWWUhUFefXtpekIU4qS_YDMLPQ50,1400 +attr/setters.pyi,sha256=pyY8TVNBu8TWhOldv_RxHzmGvdgFQH981db70r0fn5I,567 +attr/validators.py,sha256=gBJAzoo1UNDRTG9-kE0LUoUTgDr2slJymPxb6-UPt7c,20501 +attr/validators.pyi,sha256=ZbJDuF6Kom-L6ym9Cc6eT370S_a7z8YhWmP7z35ayXc,2538 +attrs-22.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +attrs-22.2.0.dist-info/LICENSE,sha256=iCEVyV38KvHutnFPjsbVy8q_Znyv-HKfQkINpj9xTp8,1109 +attrs-22.2.0.dist-info/METADATA,sha256=jgQypZGK_yplaxCh1S1gnQ_NZYKk-EwtfWygdZ_NgIc,13531 +attrs-22.2.0.dist-info/RECORD,, +attrs-22.2.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 
+attrs-22.2.0.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 +attrs-22.2.0.dist-info/top_level.txt,sha256=AGbmKnOtYpdkLRsDRQVSBIwfL32pAQ6BSo1mt-BxI7M,11 +attrs/__init__.py,sha256=90bKLoqyIHpMjnzJuXSar1dH5anUQXHqT7-yI1Qzg00,1149 +attrs/__init__.pyi,sha256=KMHncABV_sq4pubLAli-iOQjc9EM3g9y2r6M9V71_vY,2148 +attrs/__pycache__/__init__.cpython-39.pyc,, +attrs/__pycache__/converters.cpython-39.pyc,, +attrs/__pycache__/exceptions.cpython-39.pyc,, +attrs/__pycache__/filters.cpython-39.pyc,, +attrs/__pycache__/setters.cpython-39.pyc,, +attrs/__pycache__/validators.cpython-39.pyc,, +attrs/converters.py,sha256=fCBEdlYWcmI3sCnpUk2pz22GYtXzqTkp6NeOpdI64PY,70 +attrs/exceptions.py,sha256=SlDli6AY77f6ny-H7oy98OkQjsrw-D_supEuErIVYkE,70 +attrs/filters.py,sha256=dc_dNey29kH6KLU1mT2Dakq7tZ3kBfzEGwzOmDzw1F8,67 +attrs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +attrs/setters.py,sha256=oKw51C72Hh45wTwYvDHJP9kbicxiMhMR4Y5GvdpKdHQ,67 +attrs/validators.py,sha256=4ag1SyVD2Hm3PYKiNG_NOtR_e7f81Hr6GiNl4YvXo4Q,70 diff --git a/pythonFiles/lib/jedilsp/attrs-22.2.0.dist-info/REQUESTED b/pythonFiles/lib/jedilsp/attrs-22.2.0.dist-info/REQUESTED new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/pythonFiles/lib/jedilsp/attrs-22.2.0.dist-info/WHEEL b/pythonFiles/lib/jedilsp/attrs-22.2.0.dist-info/WHEEL new file mode 100644 index 0000000000000..57e3d840d59a6 --- /dev/null +++ b/pythonFiles/lib/jedilsp/attrs-22.2.0.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.38.4) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/pythonFiles/lib/jedilsp/attrs-22.2.0.dist-info/top_level.txt b/pythonFiles/lib/jedilsp/attrs-22.2.0.dist-info/top_level.txt new file mode 100644 index 0000000000000..eca8ba9f00d07 --- /dev/null +++ b/pythonFiles/lib/jedilsp/attrs-22.2.0.dist-info/top_level.txt @@ -0,0 +1,2 @@ +attr +attrs diff --git a/pythonFiles/lib/jedilsp/attrs/__init__.py b/pythonFiles/lib/jedilsp/attrs/__init__.py new file 
mode 100644 index 0000000000000..81dd6b2f0e8c7 --- /dev/null +++ b/pythonFiles/lib/jedilsp/attrs/__init__.py @@ -0,0 +1,72 @@ +# SPDX-License-Identifier: MIT + +from attr import ( + NOTHING, + Attribute, + AttrsInstance, + Factory, + __author__, + __copyright__, + __description__, + __doc__, + __email__, + __license__, + __title__, + __url__, + __version__, + __version_info__, + assoc, + cmp_using, + define, + evolve, + field, + fields, + fields_dict, + frozen, + has, + make_class, + mutable, + resolve_types, + validate, +) +from attr._next_gen import asdict, astuple + +from . import converters, exceptions, filters, setters, validators + + +__all__ = [ + "__author__", + "__copyright__", + "__description__", + "__doc__", + "__email__", + "__license__", + "__title__", + "__url__", + "__version__", + "__version_info__", + "asdict", + "assoc", + "astuple", + "Attribute", + "AttrsInstance", + "cmp_using", + "converters", + "define", + "evolve", + "exceptions", + "Factory", + "field", + "fields_dict", + "fields", + "filters", + "frozen", + "has", + "make_class", + "mutable", + "NOTHING", + "resolve_types", + "setters", + "validate", + "validators", +] diff --git a/pythonFiles/lib/jedilsp/attrs/__init__.pyi b/pythonFiles/lib/jedilsp/attrs/__init__.pyi new file mode 100644 index 0000000000000..4ea64d8ea1690 --- /dev/null +++ b/pythonFiles/lib/jedilsp/attrs/__init__.pyi @@ -0,0 +1,67 @@ +from typing import ( + Any, + Callable, + Dict, + Mapping, + Optional, + Sequence, + Tuple, + Type, +) + +# Because we need to type our own stuff, we have to make everything from +# attr explicitly public too. 
+from attr import __author__ as __author__ +from attr import __copyright__ as __copyright__ +from attr import __description__ as __description__ +from attr import __email__ as __email__ +from attr import __license__ as __license__ +from attr import __title__ as __title__ +from attr import __url__ as __url__ +from attr import __version__ as __version__ +from attr import __version_info__ as __version_info__ +from attr import _FilterType +from attr import assoc as assoc +from attr import Attribute as Attribute +from attr import AttrsInstance as AttrsInstance +from attr import cmp_using as cmp_using +from attr import converters as converters +from attr import define as define +from attr import evolve as evolve +from attr import exceptions as exceptions +from attr import Factory as Factory +from attr import field as field +from attr import fields as fields +from attr import fields_dict as fields_dict +from attr import filters as filters +from attr import frozen as frozen +from attr import has as has +from attr import make_class as make_class +from attr import mutable as mutable +from attr import NOTHING as NOTHING +from attr import resolve_types as resolve_types +from attr import setters as setters +from attr import validate as validate +from attr import validators as validators + +# TODO: see definition of attr.asdict/astuple +def asdict( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + dict_factory: Type[Mapping[Any, Any]] = ..., + retain_collection_types: bool = ..., + value_serializer: Optional[ + Callable[[type, Attribute[Any], Any], Any] + ] = ..., + tuple_keys: bool = ..., +) -> Dict[str, Any]: ... + +# TODO: add support for returning NamedTuple from the mypy plugin +def astuple( + inst: Any, + recurse: bool = ..., + filter: Optional[_FilterType[Any]] = ..., + tuple_factory: Type[Sequence[Any]] = ..., + retain_collection_types: bool = ..., +) -> Tuple[Any, ...]: ... 
diff --git a/pythonFiles/lib/jedilsp/attrs/converters.py b/pythonFiles/lib/jedilsp/attrs/converters.py new file mode 100644 index 0000000000000..edfa8d3c16ac8 --- /dev/null +++ b/pythonFiles/lib/jedilsp/attrs/converters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.converters import * # noqa diff --git a/pythonFiles/lib/jedilsp/attrs/exceptions.py b/pythonFiles/lib/jedilsp/attrs/exceptions.py new file mode 100644 index 0000000000000..bd9efed202ab1 --- /dev/null +++ b/pythonFiles/lib/jedilsp/attrs/exceptions.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.exceptions import * # noqa diff --git a/pythonFiles/lib/jedilsp/attrs/filters.py b/pythonFiles/lib/jedilsp/attrs/filters.py new file mode 100644 index 0000000000000..52959005b088f --- /dev/null +++ b/pythonFiles/lib/jedilsp/attrs/filters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.filters import * # noqa diff --git a/pythonFiles/lib/jedilsp/attrs/py.typed b/pythonFiles/lib/jedilsp/attrs/py.typed new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/pythonFiles/lib/jedilsp/attrs/setters.py b/pythonFiles/lib/jedilsp/attrs/setters.py new file mode 100644 index 0000000000000..9b50770804e41 --- /dev/null +++ b/pythonFiles/lib/jedilsp/attrs/setters.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.setters import * # noqa diff --git a/pythonFiles/lib/jedilsp/attrs/validators.py b/pythonFiles/lib/jedilsp/attrs/validators.py new file mode 100644 index 0000000000000..ab2c9b3024714 --- /dev/null +++ b/pythonFiles/lib/jedilsp/attrs/validators.py @@ -0,0 +1,3 @@ +# SPDX-License-Identifier: MIT + +from attr.validators import * # noqa diff --git a/pythonFiles/lib/jedilsp/bin/jedi-language-server b/pythonFiles/lib/jedilsp/bin/jedi-language-server new file mode 100755 index 0000000000000..c0bb7dada59c2 --- /dev/null +++ b/pythonFiles/lib/jedilsp/bin/jedi-language-server @@ -0,0 +1,8 @@ 
+#!/Users/paulacamargo/Documents/microsoft/vscode-python/.venv/bin/python +# -*- coding: utf-8 -*- +import re +import sys +from jedi_language_server.cli import cli +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(cli()) diff --git a/pythonFiles/lib/jedilsp/cattr/__init__.py b/pythonFiles/lib/jedilsp/cattr/__init__.py new file mode 100644 index 0000000000000..6c262fe85e778 --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattr/__init__.py @@ -0,0 +1,25 @@ +from .converters import BaseConverter, Converter, GenConverter, UnstructureStrategy +from .gen import override + +__all__ = ( + "global_converter", + "unstructure", + "structure", + "structure_attrs_fromtuple", + "structure_attrs_fromdict", + "UnstructureStrategy", + "BaseConverter", + "Converter", + "GenConverter", + "override", +) +from cattrs import global_converter + +unstructure = global_converter.unstructure +structure = global_converter.structure +structure_attrs_fromtuple = global_converter.structure_attrs_fromtuple +structure_attrs_fromdict = global_converter.structure_attrs_fromdict +register_structure_hook = global_converter.register_structure_hook +register_structure_hook_func = global_converter.register_structure_hook_func +register_unstructure_hook = global_converter.register_unstructure_hook +register_unstructure_hook_func = global_converter.register_unstructure_hook_func diff --git a/pythonFiles/lib/jedilsp/cattr/converters.py b/pythonFiles/lib/jedilsp/cattr/converters.py new file mode 100644 index 0000000000000..4434fe5b4b283 --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattr/converters.py @@ -0,0 +1,8 @@ +from cattrs.converters import ( + BaseConverter, + Converter, + GenConverter, + UnstructureStrategy, +) + +__all__ = ["BaseConverter", "Converter", "GenConverter", "UnstructureStrategy"] diff --git a/pythonFiles/lib/jedilsp/cattr/disambiguators.py b/pythonFiles/lib/jedilsp/cattr/disambiguators.py new file mode 100644 index 
0000000000000..f10797ade5b3b --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattr/disambiguators.py @@ -0,0 +1,3 @@ +from cattrs.disambiguators import create_uniq_field_dis_func + +__all__ = ["create_uniq_field_dis_func"] diff --git a/pythonFiles/lib/jedilsp/cattr/dispatch.py b/pythonFiles/lib/jedilsp/cattr/dispatch.py new file mode 100644 index 0000000000000..2474247f9c67a --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattr/dispatch.py @@ -0,0 +1,3 @@ +from cattrs.dispatch import FunctionDispatch, MultiStrategyDispatch + +__all__ = ["FunctionDispatch", "MultiStrategyDispatch"] diff --git a/pythonFiles/lib/jedilsp/cattr/errors.py b/pythonFiles/lib/jedilsp/cattr/errors.py new file mode 100644 index 0000000000000..af092e9ca60ab --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattr/errors.py @@ -0,0 +1,15 @@ +from cattrs.errors import ( + BaseValidationError, + ClassValidationError, + ForbiddenExtraKeysError, + IterableValidationError, + StructureHandlerNotFoundError, +) + +__all__ = [ + "BaseValidationError", + "ClassValidationError", + "ForbiddenExtraKeysError", + "IterableValidationError", + "StructureHandlerNotFoundError", +] diff --git a/pythonFiles/lib/jedilsp/cattr/gen.py b/pythonFiles/lib/jedilsp/cattr/gen.py new file mode 100644 index 0000000000000..660d4d5392a90 --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattr/gen.py @@ -0,0 +1,21 @@ +from cattrs.gen import ( + AttributeOverride, + make_dict_structure_fn, + make_dict_unstructure_fn, + make_hetero_tuple_unstructure_fn, + make_iterable_unstructure_fn, + make_mapping_structure_fn, + make_mapping_unstructure_fn, + override, +) + +__all__ = [ + "AttributeOverride", + "make_dict_structure_fn", + "make_dict_unstructure_fn", + "make_hetero_tuple_unstructure_fn", + "make_iterable_unstructure_fn", + "make_mapping_structure_fn", + "make_mapping_unstructure_fn", + "override", +] diff --git a/pythonFiles/lib/jedilsp/cattr/preconf/__init__.py b/pythonFiles/lib/jedilsp/cattr/preconf/__init__.py new file mode 100644 index 
0000000000000..fa6ad352b659f --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattr/preconf/__init__.py @@ -0,0 +1,3 @@ +from cattrs.preconf import validate_datetime + +__all__ = ["validate_datetime"] diff --git a/pythonFiles/lib/jedilsp/cattr/preconf/bson.py b/pythonFiles/lib/jedilsp/cattr/preconf/bson.py new file mode 100644 index 0000000000000..1ebe448e7115e --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattr/preconf/bson.py @@ -0,0 +1,4 @@ +"""Preconfigured converters for bson.""" +from cattrs.preconf.bson import BsonConverter, configure_converter, make_converter + +__all__ = ["BsonConverter", "configure_converter", "make_converter"] diff --git a/pythonFiles/lib/jedilsp/cattr/preconf/json.py b/pythonFiles/lib/jedilsp/cattr/preconf/json.py new file mode 100644 index 0000000000000..deb5def2ae7a8 --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattr/preconf/json.py @@ -0,0 +1,4 @@ +"""Preconfigured converters for the stdlib json.""" +from cattrs.preconf.json import configure_converter, JsonConverter, make_converter + +__all__ = ["configure_converter", "JsonConverter", "make_converter"] diff --git a/pythonFiles/lib/jedilsp/cattr/preconf/msgpack.py b/pythonFiles/lib/jedilsp/cattr/preconf/msgpack.py new file mode 100644 index 0000000000000..ca5775596dc58 --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattr/preconf/msgpack.py @@ -0,0 +1,4 @@ +"""Preconfigured converters for msgpack.""" +from cattrs.preconf.msgpack import configure_converter, make_converter, MsgpackConverter + +__all__ = ["configure_converter", "make_converter", "MsgpackConverter"] diff --git a/pythonFiles/lib/jedilsp/cattr/preconf/orjson.py b/pythonFiles/lib/jedilsp/cattr/preconf/orjson.py new file mode 100644 index 0000000000000..42db31c4ff143 --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattr/preconf/orjson.py @@ -0,0 +1,4 @@ +"""Preconfigured converters for orjson.""" +from cattrs.preconf.orjson import configure_converter, make_converter, OrjsonConverter + +__all__ = ["configure_converter", "make_converter", 
"OrjsonConverter"] diff --git a/pythonFiles/lib/jedilsp/cattr/preconf/pyyaml.py b/pythonFiles/lib/jedilsp/cattr/preconf/pyyaml.py new file mode 100644 index 0000000000000..7c9e68ab3ba70 --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattr/preconf/pyyaml.py @@ -0,0 +1,4 @@ +"""Preconfigured converters for pyyaml.""" +from cattrs.preconf.pyyaml import configure_converter, make_converter, PyyamlConverter + +__all__ = ["configure_converter", "make_converter", "PyyamlConverter"] diff --git a/pythonFiles/lib/jedilsp/cattr/preconf/tomlkit.py b/pythonFiles/lib/jedilsp/cattr/preconf/tomlkit.py new file mode 100644 index 0000000000000..94743f41e54fc --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattr/preconf/tomlkit.py @@ -0,0 +1,4 @@ +"""Preconfigured converters for tomlkit.""" +from cattrs.preconf.tomlkit import configure_converter, make_converter, TomlkitConverter + +__all__ = ["configure_converter", "make_converter", "TomlkitConverter"] diff --git a/pythonFiles/lib/jedilsp/cattr/preconf/ujson.py b/pythonFiles/lib/jedilsp/cattr/preconf/ujson.py new file mode 100644 index 0000000000000..fd008bc255d2f --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattr/preconf/ujson.py @@ -0,0 +1,4 @@ +"""Preconfigured converters for ujson.""" +from cattrs.preconf.ujson import configure_converter, make_converter, UjsonConverter + +__all__ = ["configure_converter", "make_converter", UjsonConverter] diff --git a/pythonFiles/lib/jedilsp/cattr/py.typed b/pythonFiles/lib/jedilsp/cattr/py.typed new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/pythonFiles/lib/jedilsp/cattrs-22.2.0.dist-info/INSTALLER b/pythonFiles/lib/jedilsp/cattrs-22.2.0.dist-info/INSTALLER new file mode 100644 index 0000000000000..a1b589e38a320 --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattrs-22.2.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/pythonFiles/lib/jedilsp/cattrs-22.2.0.dist-info/LICENSE b/pythonFiles/lib/jedilsp/cattrs-22.2.0.dist-info/LICENSE new file mode 100644 index 
0000000000000..340022c335c35 --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattrs-22.2.0.dist-info/LICENSE @@ -0,0 +1,11 @@ + +MIT License + +Copyright (c) 2016, Tin Tvrtković + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + diff --git a/pythonFiles/lib/jedilsp/cattrs-22.2.0.dist-info/METADATA b/pythonFiles/lib/jedilsp/cattrs-22.2.0.dist-info/METADATA new file mode 100644 index 0000000000000..0112d0c387ffa --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattrs-22.2.0.dist-info/METADATA @@ -0,0 +1,214 @@ +Metadata-Version: 2.1 +Name: cattrs +Version: 22.2.0 +Summary: Composable complex class support for attrs and dataclasses. 
+Home-page: https://github.com/python-attrs/cattrs +License: MIT +Keywords: attrs,serialization,dataclasses +Author: Tin Tvrtkovic +Author-email: tinchester@gmail.com +Requires-Python: >=3.7 +Classifier: License :: OSI Approved :: MIT License +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.7 +Classifier: Programming Language :: Python :: 3.8 +Classifier: Programming Language :: Python :: 3.9 +Classifier: Programming Language :: Python :: 3.10 +Requires-Dist: attrs (>=20) +Requires-Dist: exceptiongroup; python_version < "3.11" +Requires-Dist: typing_extensions; python_version < "3.8" +Project-URL: Bug Tracker, https://github.com/python-attrs/cattrs/issues +Project-URL: Changelog, https://cattrs.readthedocs.io/en/latest/history.html +Project-URL: Documentation, https://cattrs.readthedocs.io/en/latest/ +Project-URL: Repository, https://github.com/python-attrs/cattrs +Description-Content-Type: text/x-rst + +====== +cattrs +====== + + +.. image:: https://img.shields.io/pypi/v/cattrs.svg + :target: https://pypi.python.org/pypi/cattrs + +.. image:: https://github.com/python-attrs/cattrs/workflows/CI/badge.svg + :target: https://github.com/python-attrs/cattrs/actions?workflow=CI + +.. image:: https://readthedocs.org/projects/cattrs/badge/?version=latest + :target: https://cattrs.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + +.. image:: https://img.shields.io/pypi/pyversions/cattrs.svg + :target: https://github.com/python-attrs/cattrs + :alt: Supported Python versions + +.. image:: https://codecov.io/gh/python-attrs/cattrs/branch/master/graph/badge.svg + :target: https://codecov.io/gh/python-attrs/cattrs/ + +.. image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/ambv/black + + +---- + +``cattrs`` is an open source Python library for structuring and unstructuring +data. 
``cattrs`` works best with ``attrs`` classes, dataclasses and the usual +Python collections, but other kinds of classes are supported by manually +registering converters. + +Python has a rich set of powerful, easy to use, built-in data types like +dictionaries, lists and tuples. These data types are also the lingua franca +of most data serialization libraries, for formats like json, msgpack, yaml or +toml. + +Data types like this, and mappings like ``dict`` s in particular, represent +unstructured data. Your data is, in all likelihood, structured: not all +combinations of field names or values are valid inputs to your programs. In +Python, structured data is better represented with classes and enumerations. +``attrs`` is an excellent library for declaratively describing the structure of +your data, and validating it. + +When you're handed unstructured data (by your network, file system, database...), +``cattrs`` helps to convert this data into structured data. When you have to +convert your structured data into data types other libraries can handle, +``cattrs`` turns your classes and enumerations into dictionaries, integers and +strings. + +Here's a simple taste. The list containing a float, an int and a string +gets converted into a tuple of three ints. + +.. code-block:: pycon + + >>> import cattrs + >>> + >>> cattrs.structure([1.0, 2, "3"], tuple[int, int, int]) + (1, 2, 3) + +``cattrs`` works well with ``attrs`` classes out of the box. + +.. code-block:: pycon + + >>> from attrs import frozen + >>> import cattrs + >>> + >>> @frozen # It works with non-frozen classes too. + ... class C: + ... a: int + ... b: str + ... + >>> instance = C(1, 'a') + >>> cattrs.unstructure(instance) + {'a': 1, 'b': 'a'} + >>> cattrs.structure({'a': 1, 'b': 'a'}, C) + C(a=1, b='a') + +Here's a much more complex example, involving ``attrs`` classes with type +metadata. + +.. 
code-block:: pycon + + >>> from enum import unique, Enum + >>> from typing import Optional, Sequence, Union + >>> from cattrs import structure, unstructure + >>> from attrs import define, field + >>> + >>> @unique + ... class CatBreed(Enum): + ... SIAMESE = "siamese" + ... MAINE_COON = "maine_coon" + ... SACRED_BIRMAN = "birman" + ... + >>> @define + ... class Cat: + ... breed: CatBreed + ... names: Sequence[str] + ... + >>> @define + ... class DogMicrochip: + ... chip_id = field() # Type annotations are optional, but recommended + ... time_chipped: float = field() + ... + >>> @define + ... class Dog: + ... cuteness: int + ... chip: Optional[DogMicrochip] = None + ... + >>> p = unstructure([Dog(cuteness=1, chip=DogMicrochip(chip_id=1, time_chipped=10.0)), + ... Cat(breed=CatBreed.MAINE_COON, names=('Fluffly', 'Fluffer'))]) + ... + >>> print(p) + [{'cuteness': 1, 'chip': {'chip_id': 1, 'time_chipped': 10.0}}, {'breed': 'maine_coon', 'names': ('Fluffly', 'Fluffer')}] + >>> print(structure(p, list[Union[Dog, Cat]])) + [Dog(cuteness=1, chip=DogMicrochip(chip_id=1, time_chipped=10.0)), Cat(breed=, names=['Fluffly', 'Fluffer'])] + +Consider unstructured data a low-level representation that needs to be converted +to structured data to be handled, and use ``structure``. When you're done, +``unstructure`` the data to its unstructured form and pass it along to another +library or module. Use `attrs type metadata `_ +to add type metadata to attributes, so ``cattrs`` will know how to structure and +destructure them. + +* Free software: MIT license +* Documentation: https://cattrs.readthedocs.io. +* Python versions supported: 3.7 and up. (Older Python versions, like 2.7, 3.5 and 3.6 are supported by older versions; see the changelog.) 
+ + +Features +-------- + +* Converts structured data into unstructured data, recursively: + + * ``attrs`` classes and dataclasses are converted into dictionaries in a way similar to ``attrs.asdict``, or into tuples in a way similar to ``attrs.astuple``. + * Enumeration instances are converted to their values. + * Other types are let through without conversion. This includes types such as + integers, dictionaries, lists and instances of non-``attrs`` classes. + * Custom converters for any type can be registered using ``register_unstructure_hook``. + +* Converts unstructured data into structured data, recursively, according to + your specification given as a type. The following types are supported: + + * ``typing.Optional[T]``. + * ``typing.List[T]``, ``typing.MutableSequence[T]``, ``typing.Sequence[T]`` (converts to a list). + * ``typing.Tuple`` (both variants, ``Tuple[T, ...]`` and ``Tuple[X, Y, Z]``). + * ``typing.MutableSet[T]``, ``typing.Set[T]`` (converts to a set). + * ``typing.FrozenSet[T]`` (converts to a frozenset). + * ``typing.Dict[K, V]``, ``typing.MutableMapping[K, V]``, ``typing.Mapping[K, V]`` (converts to a dict). + * ``attrs`` classes with simple attributes and the usual ``__init__``. + + * Simple attributes are attributes that can be assigned unstructured data, + like numbers, strings, and collections of unstructured data. + + * All `attrs` classes and dataclasses with the usual ``__init__``, if their complex attributes have type metadata. + * ``typing.Union`` s of supported ``attrs`` classes, given that all of the classes have a unique field. + * ``typing.Union`` s of anything, given that you provide a disambiguation function for it. + * Custom converters for any type can be registered using ``register_structure_hook``. + +``cattrs`` comes with preconfigured converters for a number of serialization libraries, including json, msgpack, bson, yaml and toml. +For details, see the `cattr.preconf package `_. 
+ +Additional documentation +------------------------ +* `On structured and unstructured data, or the case for cattrs `_ +* `Why I use attrs instead of pydantic `_ +* `cattrs I: un/structuring speed `_ + +Credits +------- + +Major credits to Hynek Schlawack for creating attrs_ and its predecessor, +characteristic_. + +``cattrs`` is tested with Hypothesis_, by David R. MacIver. + +``cattrs`` is benchmarked using perf_ and pytest-benchmark_. + +This package was created with Cookiecutter_ and the `audreyr/cookiecutter-pypackage`_ project template. + +.. _attrs: https://github.com/hynek/attrs +.. _characteristic: https://github.com/hynek/characteristic +.. _Hypothesis: http://hypothesis.readthedocs.io/en/latest/ +.. _perf: https://github.com/haypo/perf +.. _pytest-benchmark: https://pytest-benchmark.readthedocs.io/en/latest/index.html +.. _Cookiecutter: https://github.com/audreyr/cookiecutter +.. _`audreyr/cookiecutter-pypackage`: https://github.com/audreyr/cookiecutter-pypackage + diff --git a/pythonFiles/lib/jedilsp/cattrs-22.2.0.dist-info/RECORD b/pythonFiles/lib/jedilsp/cattrs-22.2.0.dist-info/RECORD new file mode 100644 index 0000000000000..b4f86791f8e30 --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattrs-22.2.0.dist-info/RECORD @@ -0,0 +1,68 @@ +cattr/__init__.py,sha256=pODFKaZ7MisyHe_XPc9X6KKG73mqduHUvQO142XwijY,906 +cattr/__pycache__/__init__.cpython-39.pyc,, +cattr/__pycache__/converters.cpython-39.pyc,, +cattr/__pycache__/disambiguators.cpython-39.pyc,, +cattr/__pycache__/dispatch.cpython-39.pyc,, +cattr/__pycache__/errors.cpython-39.pyc,, +cattr/__pycache__/gen.cpython-39.pyc,, +cattr/converters.py,sha256=rQhY4J8r7QTZh5WICuFe4GWO1v0DS3DgQ9r569zd6jg,192 +cattr/disambiguators.py,sha256=ugD1fq1Z5x1pGu5P1lMzcT-IEi1q7IfQJIHEdmg62vM,103 +cattr/dispatch.py,sha256=uVEOgHWR9Hn5tm-wIw-bDccqrxJByVi8yRKaYyvL67k,125 +cattr/errors.py,sha256=V4RhoCObwGrlaM3oyn1H_FYxGR8iAB9dG5NxFDYM548,343 +cattr/gen.py,sha256=3YO5IOioAd5so_ksyWgvevMS0d4L95Ny4fVzt_wFYrM,520 
+cattr/preconf/__init__.py,sha256=NqPE7uhVfcP-PggkUpsbfAutMo8oHjcoB1cvjgLft-s,78 +cattr/preconf/__pycache__/__init__.cpython-39.pyc,, +cattr/preconf/__pycache__/bson.cpython-39.pyc,, +cattr/preconf/__pycache__/json.cpython-39.pyc,, +cattr/preconf/__pycache__/msgpack.cpython-39.pyc,, +cattr/preconf/__pycache__/orjson.cpython-39.pyc,, +cattr/preconf/__pycache__/pyyaml.cpython-39.pyc,, +cattr/preconf/__pycache__/tomlkit.cpython-39.pyc,, +cattr/preconf/__pycache__/ujson.cpython-39.pyc,, +cattr/preconf/bson.py,sha256=xSFxZt7xMGu95YtXH93NRa89CxraZe48NxlNibS9lBM,194 +cattr/preconf/json.py,sha256=XmvF7KNmzgk6lfi3XL7I-1Zq-5ZVPdN1SjaWoTyHAMU,205 +cattr/preconf/msgpack.py,sha256=iUST2Hgfm0cLYZ6RUYWYJKFeMZMQWqb14SVtchWgJK4,206 +cattr/preconf/orjson.py,sha256=zn8qs82j0dtfdjfXjJQQNLlH5ltw4g64fkRxOaAzRQE,202 +cattr/preconf/pyyaml.py,sha256=7pt4UdJmIeNBeS85n-TIqrzhGrf8byn6UfDMDLeV0H4,202 +cattr/preconf/tomlkit.py,sha256=8bKiWMIH8cTahgHacnGdvMbTNb_HOy6TTuAhsmbYYRQ,206 +cattr/preconf/ujson.py,sha256=bE0B2ihgeEEbgMZqll4traIaV-N1QE-6uu-8HnDB21I,196 +cattr/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +cattrs-22.2.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +cattrs-22.2.0.dist-info/LICENSE,sha256=9fudHt43qIykf0IMSZ3KD0oFvJk-Esd9I1IKrSkcAb8,1074 +cattrs-22.2.0.dist-info/METADATA,sha256=zDoEbLD4jQdHQdQjESnXaHfSv0owbayysej9qUYbI0A,9046 +cattrs-22.2.0.dist-info/RECORD,, +cattrs-22.2.0.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +cattrs-22.2.0.dist-info/WHEEL,sha256=bbU3AyvhQ312rVm7zzRQjs6axI1UYWC3nmFA2E6FFSI,88 +cattrs/__init__.py,sha256=rLTRU4V7QidokoiVKQwHNbP1TNbvaQcWWwxmumlnk5Y,1459 +cattrs/__pycache__/__init__.cpython-39.pyc,, +cattrs/__pycache__/_compat.cpython-39.pyc,, +cattrs/__pycache__/_generics.cpython-39.pyc,, +cattrs/__pycache__/converters.cpython-39.pyc,, +cattrs/__pycache__/disambiguators.cpython-39.pyc,, +cattrs/__pycache__/dispatch.cpython-39.pyc,, +cattrs/__pycache__/errors.cpython-39.pyc,, 
+cattrs/__pycache__/gen.cpython-39.pyc,, +cattrs/_compat.py,sha256=p7gi-BQdDLKBej1_5FaIwRfROfl81zlIS_FZk2S1XHI,11903 +cattrs/_generics.py,sha256=06Xddc7PgaZn2xdSSQ3dYId-wl2YRMObi72d35cyzvY,678 +cattrs/converters.py,sha256=CgccGPCXMTWoKi4MD21tjzTlZb6Yta-lwSOm-Mnwsy4,37591 +cattrs/disambiguators.py,sha256=GstF2CAuyMXGq8NI7kOOGt5a3Yw0r4VojtVQP6yfwfg,2243 +cattrs/dispatch.py,sha256=hUY9QuPDHEX2tFLcr1bd-JlC-gW8mk_zPNP7BsE96JU,4543 +cattrs/errors.py,sha256=EIA3dauLhCJo2UvUwUmY12QlCkkgKuWV6EyIx2QwkaY,1583 +cattrs/gen.py,sha256=3w0KTjBJeFD4_kXvNtlFf-x9pSzHiykE1u2DDtvxgWM,27240 +cattrs/preconf/__init__.py,sha256=llrsaQAQ_c6gf6niLeNDvzLNxXajVyQwr1BDLe_cuTo,165 +cattrs/preconf/__pycache__/__init__.cpython-39.pyc,, +cattrs/preconf/__pycache__/bson.cpython-39.pyc,, +cattrs/preconf/__pycache__/json.cpython-39.pyc,, +cattrs/preconf/__pycache__/msgpack.cpython-39.pyc,, +cattrs/preconf/__pycache__/orjson.cpython-39.pyc,, +cattrs/preconf/__pycache__/pyyaml.cpython-39.pyc,, +cattrs/preconf/__pycache__/tomlkit.cpython-39.pyc,, +cattrs/preconf/__pycache__/ujson.cpython-39.pyc,, +cattrs/preconf/bson.py,sha256=h9WDdj4DBAG-bXRXjpEly-sGLSeSMyvLz2HMaypuZP4,3070 +cattrs/preconf/json.py,sha256=1uXVtzawrmFmfDV1S0BondXtB1PvgKXXci1xgJT1x2U,1588 +cattrs/preconf/msgpack.py,sha256=051-BMYDxBDxr2uH6ol8NBBLmOStKR6ihi0kWD3lmV4,1275 +cattrs/preconf/orjson.py,sha256=8aFn7G3U_DOi3_-tThmWgAxtT8b5w3rY_my2nIHkOjE,2551 +cattrs/preconf/pyyaml.py,sha256=8WlOYE3ZlN_fQ-lmpaByrNJ8WGimMHcusl3ZjNGgTE4,1298 +cattrs/preconf/tomlkit.py,sha256=D5G0s1IGN0g9Hxn89v0MN1f8IXICjKYVy9w91cPwoxo,2512 +cattrs/preconf/ujson.py,sha256=58Y0SGATb_1-_Mq4z2TuY24c91dIPI6nN6-cn7bdtFM,1498 +cattrs/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 diff --git a/pythonFiles/lib/jedilsp/cattrs-22.2.0.dist-info/REQUESTED b/pythonFiles/lib/jedilsp/cattrs-22.2.0.dist-info/REQUESTED new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/pythonFiles/lib/jedilsp/cattrs-22.2.0.dist-info/WHEEL 
b/pythonFiles/lib/jedilsp/cattrs-22.2.0.dist-info/WHEEL new file mode 100644 index 0000000000000..2892f30b50d35 --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattrs-22.2.0.dist-info/WHEEL @@ -0,0 +1,4 @@ +Wheel-Version: 1.0 +Generator: poetry-core 1.1.0 +Root-Is-Purelib: true +Tag: py3-none-any diff --git a/pythonFiles/lib/jedilsp/cattrs/__init__.py b/pythonFiles/lib/jedilsp/cattrs/__init__.py new file mode 100644 index 0000000000000..f8b8801ff9a3d --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattrs/__init__.py @@ -0,0 +1,49 @@ +from .converters import BaseConverter, Converter, GenConverter, UnstructureStrategy +from .errors import ( + BaseValidationError, + ClassValidationError, + ForbiddenExtraKeysError, + IterableValidationError, + StructureHandlerNotFoundError, +) +from .gen import override + +__all__ = ( + "BaseConverter", + "BaseValidationError", + "ClassValidationError", + "Converter", + "converters", + "disambiguators", + "dispatch", + "errors", + "ForbiddenExtraKeysError", + "gen", + "GenConverter", + "global_converter", + "IterableValidationError", + "override", + "preconf", + "register_structure_hook", + "register_structure_hook_func", + "register_unstructure_hook", + "register_unstructure_hook_func", + "structure", + "structure_attrs_fromdict", + "structure_attrs_fromtuple", + "StructureHandlerNotFoundError", + "unstructure", + "UnstructureStrategy", +) + + +global_converter = Converter() + +unstructure = global_converter.unstructure +structure = global_converter.structure +structure_attrs_fromtuple = global_converter.structure_attrs_fromtuple +structure_attrs_fromdict = global_converter.structure_attrs_fromdict +register_structure_hook = global_converter.register_structure_hook +register_structure_hook_func = global_converter.register_structure_hook_func +register_unstructure_hook = global_converter.register_unstructure_hook +register_unstructure_hook_func = global_converter.register_unstructure_hook_func diff --git 
a/pythonFiles/lib/jedilsp/cattrs/_compat.py b/pythonFiles/lib/jedilsp/cattrs/_compat.py new file mode 100644 index 0000000000000..c8e77cdd10036 --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattrs/_compat.py @@ -0,0 +1,389 @@ +import builtins +import sys +from collections.abc import MutableSet as AbcMutableSet +from collections.abc import Set as AbcSet +from dataclasses import MISSING +from dataclasses import fields as dataclass_fields +from dataclasses import is_dataclass +from typing import AbstractSet as TypingAbstractSet +from typing import Any, Dict, FrozenSet, List +from typing import Mapping as TypingMapping +from typing import MutableMapping as TypingMutableMapping +from typing import MutableSequence as TypingMutableSequence +from typing import MutableSet as TypingMutableSet +from typing import NewType, Optional +from typing import Sequence as TypingSequence +from typing import Set as TypingSet +from typing import Tuple, get_type_hints + +from attr import NOTHING, Attribute, Factory +from attr import fields as attrs_fields +from attr import resolve_types + +version_info = sys.version_info[0:3] +is_py37 = version_info[:2] == (3, 7) +is_py38 = version_info[:2] == (3, 8) +is_py39_plus = version_info[:2] >= (3, 9) +is_py310_plus = version_info[:2] >= (3, 10) + +if is_py37: + + def get_args(cl): + return cl.__args__ + + def get_origin(cl): + return getattr(cl, "__origin__", None) + + from typing_extensions import Protocol + +else: + from typing import Protocol, get_args, get_origin # NOQA + +if "ExceptionGroup" not in dir(builtins): + from exceptiongroup import ExceptionGroup +else: + ExceptionGroup = ExceptionGroup + + +def has(cls): + return hasattr(cls, "__attrs_attrs__") or hasattr(cls, "__dataclass_fields__") + + +def has_with_generic(cls): + """Test whether the class if a normal or generic attrs or dataclass.""" + return has(cls) or has(get_origin(cls)) + + +def fields(type): + try: + return type.__attrs_attrs__ + except AttributeError: + try: + return 
dataclass_fields(type) + except AttributeError: + raise Exception("Not an attrs or dataclass class.") + + +def adapted_fields(cl) -> List[Attribute]: + """Return the attrs format of `fields()` for attrs and dataclasses.""" + if is_dataclass(cl): + attrs = dataclass_fields(cl) + if any(isinstance(a.type, str) for a in attrs): + # Do this conditionally in case `get_type_hints` fails, so + # users can resolve on their own first. + type_hints = get_type_hints(cl) + else: + type_hints = {} + return [ + Attribute( + attr.name, + attr.default + if attr.default is not MISSING + else ( + Factory(attr.default_factory) + if attr.default_factory is not MISSING + else NOTHING + ), + None, + True, + None, + True, + attr.init, + True, + type=type_hints.get(attr.name, attr.type), + ) + for attr in attrs + ] + else: + attribs = attrs_fields(cl) + if any(isinstance(a.type, str) for a in attribs): + # PEP 563 annotations - need to be resolved. + resolve_types(cl) + attribs = attrs_fields(cl) + return attribs + + +def is_hetero_tuple(type: Any) -> bool: + origin = getattr(type, "__origin__", None) + return origin is tuple and ... 
not in type.__args__ + + +def is_protocol(type: Any) -> bool: + return issubclass(type, Protocol) and getattr(type, "_is_protocol", False) + + +OriginAbstractSet = AbcSet +OriginMutableSet = AbcMutableSet + +if is_py37 or is_py38: + Set = TypingSet + AbstractSet = TypingAbstractSet + MutableSet = TypingMutableSet + + Sequence = TypingSequence + MutableSequence = TypingMutableSequence + MutableMapping = TypingMutableMapping + Mapping = TypingMapping + FrozenSetSubscriptable = FrozenSet + TupleSubscriptable = Tuple + + from collections import Counter as ColCounter + from typing import Counter, Union, _GenericAlias + + def is_annotated(_): + return False + + def is_tuple(type): + return type in (Tuple, tuple) or ( + type.__class__ is _GenericAlias and issubclass(type.__origin__, Tuple) + ) + + def is_union_type(obj): + return ( + obj is Union or isinstance(obj, _GenericAlias) and obj.__origin__ is Union + ) + + def get_newtype_base(typ: Any) -> Optional[type]: + supertype = getattr(typ, "__supertype__", None) + if ( + supertype is not None + and getattr(typ, "__qualname__", "") == "NewType..new_type" + and typ.__module__ in ("typing", "typing_extensions") + ): + return supertype + return None + + def is_sequence(type: Any) -> bool: + return type in (List, list, Tuple, tuple) or ( + type.__class__ is _GenericAlias + and ( + type.__origin__ not in (Union, Tuple, tuple) + and issubclass(type.__origin__, TypingSequence) + ) + or (type.__origin__ in (Tuple, tuple) and type.__args__[1] is ...) 
+ ) + + def is_mutable_set(type): + return type is set or ( + type.__class__ is _GenericAlias and issubclass(type.__origin__, MutableSet) + ) + + def is_frozenset(type): + return type is frozenset or ( + type.__class__ is _GenericAlias and issubclass(type.__origin__, FrozenSet) + ) + + def is_mapping(type): + return type in (TypingMapping, dict) or ( + type.__class__ is _GenericAlias + and issubclass(type.__origin__, TypingMapping) + ) + + bare_generic_args = { + List.__args__, + TypingSequence.__args__, + TypingMapping.__args__, + Dict.__args__, + TypingMutableSequence.__args__, + Tuple.__args__, + None, # non-parametrized containers do not have `__args__ attribute in py3.7-8 + } + + def is_bare(type): + return getattr(type, "__args__", None) in bare_generic_args + + def is_counter(type): + return ( + type in (Counter, ColCounter) + or getattr(type, "__origin__", None) is ColCounter + ) + + if is_py38: + from typing import Literal + + def is_literal(type) -> bool: + return type.__class__ is _GenericAlias and type.__origin__ is Literal + + else: + # No literals in 3.7. + def is_literal(_) -> bool: + return False + + def is_generic(obj): + return isinstance(obj, _GenericAlias) + + def copy_with(type, args): + """Replace a generic type's arguments.""" + return type.copy_with(args) + +else: + # 3.9+ + from collections import Counter + from collections.abc import Mapping as AbcMapping + from collections.abc import MutableMapping as AbcMutableMapping + from collections.abc import MutableSequence as AbcMutableSequence + from collections.abc import MutableSet as AbcMutableSet + from collections.abc import Sequence as AbcSequence + from collections.abc import Set as AbcSet + from types import GenericAlias + from typing import Annotated + from typing import Counter as TypingCounter + from typing import ( + Union, + _AnnotatedAlias, + _GenericAlias, + _SpecialGenericAlias, + _UnionGenericAlias, + ) + + try: + # Not present on 3.9.0, so we try carefully. 
+ from typing import _LiteralGenericAlias + + def is_literal(type) -> bool: + return type.__class__ is _LiteralGenericAlias + + except ImportError: + + def is_literal(_) -> bool: + return False + + Set = AbcSet + AbstractSet = AbcSet + MutableSet = AbcMutableSet + Sequence = AbcSequence + MutableSequence = AbcMutableSequence + MutableMapping = AbcMutableMapping + Mapping = AbcMapping + FrozenSetSubscriptable = frozenset + TupleSubscriptable = tuple + + def is_annotated(type) -> bool: + return getattr(type, "__class__", None) is _AnnotatedAlias + + def is_tuple(type): + return ( + type in (Tuple, tuple) + or (type.__class__ is _GenericAlias and issubclass(type.__origin__, Tuple)) + or (getattr(type, "__origin__", None) is tuple) + ) + + if is_py310_plus: + + def is_union_type(obj): + from types import UnionType + + return ( + obj is Union + or (isinstance(obj, _UnionGenericAlias) and obj.__origin__ is Union) + or isinstance(obj, UnionType) + ) + + def get_newtype_base(typ: Any) -> Optional[type]: + if typ is NewType or isinstance(typ, NewType): + return typ.__supertype__ + return None + + else: + + def is_union_type(obj): + return ( + obj is Union + or isinstance(obj, _UnionGenericAlias) + and obj.__origin__ is Union + ) + + def get_newtype_base(typ: Any) -> Optional[type]: + supertype = getattr(typ, "__supertype__", None) + if ( + supertype is not None + and getattr(typ, "__qualname__", "") == "NewType..new_type" + and typ.__module__ in ("typing", "typing_extensions") + ): + return supertype + return None + + def is_sequence(type: Any) -> bool: + origin = getattr(type, "__origin__", None) + return ( + type + in ( + List, + list, + TypingSequence, + TypingMutableSequence, + AbcMutableSequence, + Tuple, + tuple, + ) + or ( + type.__class__ is _GenericAlias + and ( + (origin is not tuple) + and issubclass(origin, TypingSequence) + or origin is tuple + and type.__args__[1] is ... 
+ ) + ) + or (origin in (list, AbcMutableSequence, AbcSequence)) + or (origin is tuple and type.__args__[1] is ...) + ) + + def is_mutable_set(type): + return ( + type in (TypingSet, TypingMutableSet, set) + or ( + type.__class__ is _GenericAlias + and issubclass(type.__origin__, TypingMutableSet) + ) + or (getattr(type, "__origin__", None) in (set, AbcMutableSet, AbcSet)) + ) + + def is_frozenset(type): + return ( + type in (FrozenSet, frozenset) + or ( + type.__class__ is _GenericAlias + and issubclass(type.__origin__, FrozenSet) + ) + or (getattr(type, "__origin__", None) is frozenset) + ) + + def is_bare(type): + return isinstance(type, _SpecialGenericAlias) or ( + not hasattr(type, "__origin__") and not hasattr(type, "__args__") + ) + + def is_mapping(type): + return ( + type in (TypingMapping, Dict, TypingMutableMapping, dict, AbcMutableMapping) + or ( + type.__class__ is _GenericAlias + and issubclass(type.__origin__, TypingMapping) + ) + or ( + getattr(type, "__origin__", None) + in (dict, AbcMutableMapping, AbcMapping) + ) + or issubclass(type, dict) + ) + + def is_counter(type): + return ( + type in (Counter, TypingCounter) + or getattr(type, "__origin__", None) is Counter + ) + + def is_generic(obj): + return isinstance(obj, _GenericAlias) or isinstance(obj, GenericAlias) + + def copy_with(type, args): + """Replace a generic type's arguments.""" + if is_annotated(type): + # typing.Annotated requires a special case. 
+ return Annotated[args] # type: ignore + return type.__origin__[args] + + +def is_generic_attrs(type): + return is_generic(type) and has(type.__origin__) diff --git a/pythonFiles/lib/jedilsp/cattrs/_generics.py b/pythonFiles/lib/jedilsp/cattrs/_generics.py new file mode 100644 index 0000000000000..b69e058059be2 --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattrs/_generics.py @@ -0,0 +1,22 @@ +from typing import Any, Mapping + +from ._compat import copy_with, get_args, is_annotated, is_generic + + +def deep_copy_with(t, mapping: Mapping[str, Any]): + args = get_args(t) + rest = () + if is_annotated(t) and args: + # If we're dealing with `Annotated`, we only map the first type parameter + rest = tuple(args[1:]) + args = (args[0],) + new_args = ( + tuple( + mapping[a.__name__] + if hasattr(a, "__name__") and a.__name__ in mapping + else (deep_copy_with(a, mapping) if is_generic(a) else a) + for a in args + ) + + rest + ) + return copy_with(t, new_args) if new_args != args else t diff --git a/pythonFiles/lib/jedilsp/cattrs/converters.py b/pythonFiles/lib/jedilsp/cattrs/converters.py new file mode 100644 index 0000000000000..1221f65f97284 --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattrs/converters.py @@ -0,0 +1,992 @@ +from collections import Counter +from collections.abc import MutableSet as AbcMutableSet +from dataclasses import Field +from enum import Enum +from functools import lru_cache +from typing import ( + Any, + Callable, + Dict, + Iterable, + List, + NoReturn, + Optional, + Tuple, + Type, + TypeVar, + Union, +) + +from attr import Attribute +from attr import has as attrs_has +from attr import resolve_types + +from cattrs.errors import IterableValidationError, StructureHandlerNotFoundError + +from ._compat import ( + FrozenSetSubscriptable, + Mapping, + MutableMapping, + MutableSequence, + OriginAbstractSet, + OriginMutableSet, + Sequence, + Set, + fields, + get_newtype_base, + get_origin, + has, + has_with_generic, + is_annotated, + is_bare, + is_counter, + 
is_frozenset, + is_generic, + is_generic_attrs, + is_hetero_tuple, + is_literal, + is_mapping, + is_mutable_set, + is_protocol, + is_sequence, + is_tuple, + is_union_type, +) +from .disambiguators import create_uniq_field_dis_func +from .dispatch import MultiStrategyDispatch +from .gen import ( + AttributeOverride, + DictStructureFn, + HeteroTupleUnstructureFn, + IterableUnstructureFn, + MappingStructureFn, + MappingUnstructureFn, + make_dict_structure_fn, + make_dict_unstructure_fn, + make_hetero_tuple_unstructure_fn, + make_iterable_unstructure_fn, + make_mapping_structure_fn, + make_mapping_unstructure_fn, +) + +NoneType = type(None) +T = TypeVar("T") +V = TypeVar("V") + + +class UnstructureStrategy(Enum): + """`attrs` classes unstructuring strategies.""" + + AS_DICT = "asdict" + AS_TUPLE = "astuple" + + +def _subclass(typ: Type) -> Callable[[Type], bool]: + """a shortcut""" + return lambda cls: issubclass(cls, typ) + + +def is_attrs_union(typ: Type) -> bool: + return is_union_type(typ) and all(has(get_origin(e) or e) for e in typ.__args__) + + +def is_attrs_union_or_none(typ: Type) -> bool: + return is_union_type(typ) and all( + e is NoneType or has(get_origin(e) or e) for e in typ.__args__ + ) + + +def is_optional(typ: Type) -> bool: + return is_union_type(typ) and NoneType in typ.__args__ and len(typ.__args__) == 2 + + +def is_literal_containing_enums(typ: Type) -> bool: + return is_literal(typ) and any(isinstance(val, Enum) for val in typ.__args__) + + +class BaseConverter: + """Converts between structured and unstructured data.""" + + __slots__ = ( + "_dis_func_cache", + "_unstructure_func", + "_unstructure_attrs", + "_structure_attrs", + "_dict_factory", + "_union_struct_registry", + "_structure_func", + "_prefer_attrib_converters", + "detailed_validation", + ) + + def __init__( + self, + dict_factory: Callable[[], Any] = dict, + unstruct_strat: UnstructureStrategy = UnstructureStrategy.AS_DICT, + prefer_attrib_converters: bool = False, + 
detailed_validation: bool = True, + ) -> None: + unstruct_strat = UnstructureStrategy(unstruct_strat) + self._prefer_attrib_converters = prefer_attrib_converters + + self.detailed_validation = detailed_validation + + # Create a per-instance cache. + if unstruct_strat is UnstructureStrategy.AS_DICT: + self._unstructure_attrs = self.unstructure_attrs_asdict + self._structure_attrs = self.structure_attrs_fromdict + else: + self._unstructure_attrs = self.unstructure_attrs_astuple + self._structure_attrs = self.structure_attrs_fromtuple + + self._dis_func_cache = lru_cache()(self._get_dis_func) + + self._unstructure_func = MultiStrategyDispatch(self._unstructure_identity) + self._unstructure_func.register_cls_list( + [(bytes, self._unstructure_identity), (str, self._unstructure_identity)] + ) + self._unstructure_func.register_func_list( + [ + ( + is_protocol, + lambda o: self.unstructure(o, unstructure_as=o.__class__), + ), + (is_mapping, self._unstructure_mapping), + (is_sequence, self._unstructure_seq), + (is_mutable_set, self._unstructure_seq), + (is_frozenset, self._unstructure_seq), + (_subclass(Enum), self._unstructure_enum), + (has, self._unstructure_attrs), + (is_union_type, self._unstructure_union), + ] + ) + + # Per-instance register of to-attrs converters. + # Singledispatch dispatches based on the first argument, so we + # store the function and switch the arguments in self.loads. 
+ self._structure_func = MultiStrategyDispatch(BaseConverter._structure_error) + self._structure_func.register_func_list( + [ + (lambda cl: cl is Any or cl is Optional or cl is None, lambda v, _: v), + (is_generic_attrs, self._gen_structure_generic, True), + (lambda t: get_newtype_base(t) is not None, self._structure_newtype), + (is_literal, self._structure_simple_literal), + (is_literal_containing_enums, self._structure_enum_literal), + (is_sequence, self._structure_list), + (is_mutable_set, self._structure_set), + (is_frozenset, self._structure_frozenset), + (is_tuple, self._structure_tuple), + (is_mapping, self._structure_dict), + (is_attrs_union_or_none, self._gen_attrs_union_structure, True), + ( + lambda t: is_union_type(t) and t in self._union_struct_registry, + self._structure_union, + ), + (is_optional, self._structure_optional), + (has, self._structure_attrs), + ] + ) + # Strings are sequences. + self._structure_func.register_cls_list( + [ + (str, self._structure_call), + (bytes, self._structure_call), + (int, self._structure_call), + (float, self._structure_call), + (Enum, self._structure_call), + ] + ) + + self._dict_factory = dict_factory + + # Unions are instances now, not classes. We use different registries. + self._union_struct_registry: Dict[Any, Callable[[Any, Type[T]], T]] = {} + + def unstructure(self, obj: Any, unstructure_as: Any = None) -> Any: + return self._unstructure_func.dispatch( + obj.__class__ if unstructure_as is None else unstructure_as + )(obj) + + @property + def unstruct_strat(self) -> UnstructureStrategy: + """The default way of unstructuring ``attrs`` classes.""" + return ( + UnstructureStrategy.AS_DICT + if self._unstructure_attrs == self.unstructure_attrs_asdict + else UnstructureStrategy.AS_TUPLE + ) + + def register_unstructure_hook(self, cls: Any, func: Callable[[Any], Any]) -> None: + """Register a class-to-primitive converter function for a class. 
+ + The converter function should take an instance of the class and return + its Python equivalent. + """ + if attrs_has(cls): + resolve_types(cls) + if is_union_type(cls): + self._unstructure_func.register_func_list([(lambda t: t == cls, func)]) + elif get_newtype_base(cls) is not None: + # This is a newtype, so we handle it specially. + self._unstructure_func.register_func_list([(lambda t: t is cls, func)]) + else: + self._unstructure_func.register_cls_list([(cls, func)]) + + def register_unstructure_hook_func( + self, check_func: Callable[[Any], bool], func: Callable[[Any], Any] + ) -> None: + """Register a class-to-primitive converter function for a class, using + a function to check if it's a match. + """ + self._unstructure_func.register_func_list([(check_func, func)]) + + def register_unstructure_hook_factory( + self, + predicate: Callable[[Any], bool], + factory: Callable[[Any], Callable[[Any], Any]], + ) -> None: + """ + Register a hook factory for a given predicate. + + A predicate is a function that, given a type, returns whether the factory + can produce a hook for that type. + + A factory is a callable that, given a type, produces an unstructuring + hook for that type. This unstructuring hook will be cached. + """ + self._unstructure_func.register_func_list([(predicate, factory, True)]) + + def register_structure_hook( + self, cl: Any, func: Callable[[Any, Type[T]], T] + ) -> None: + """Register a primitive-to-class converter function for a type. + + The converter function should take two arguments: + * a Python object to be converted, + * the type to convert to + + and return the instance of the class. The type may seem redundant, but + is sometimes needed (for example, when dealing with generic classes). + """ + if attrs_has(cl): + resolve_types(cl) + if is_union_type(cl): + self._union_struct_registry[cl] = func + self._structure_func.clear_cache() + elif get_newtype_base(cl) is not None: + # This is a newtype, so we handle it specially. 
+ self._structure_func.register_func_list([(lambda t: t is cl, func)]) + else: + self._structure_func.register_cls_list([(cl, func)]) + + def register_structure_hook_func( + self, check_func: Callable[[Type[T]], bool], func: Callable[[Any, Type[T]], T] + ) -> None: + """Register a class-to-primitive converter function for a class, using + a function to check if it's a match. + """ + self._structure_func.register_func_list([(check_func, func)]) + + def register_structure_hook_factory( + self, + predicate: Callable[[Any], bool], + factory: Callable[[Any], Callable[[Any, Any], Any]], + ) -> None: + """ + Register a hook factory for a given predicate. + + A predicate is a function that, given a type, returns whether the factory + can produce a hook for that type. + + A factory is a callable that, given a type, produces a structuring + hook for that type. This structuring hook will be cached. + """ + self._structure_func.register_func_list([(predicate, factory, True)]) + + def structure(self, obj: Any, cl: Type[T]) -> T: + """Convert unstructured Python data structures to structured data.""" + + return self._structure_func.dispatch(cl)(obj, cl) + + # Classes to Python primitives. 
+ def unstructure_attrs_asdict(self, obj: Any) -> Dict[str, Any]: + """Our version of `attrs.asdict`, so we can call back to us.""" + attrs = fields(obj.__class__) + dispatch = self._unstructure_func.dispatch + rv = self._dict_factory() + for a in attrs: + name = a.name + v = getattr(obj, name) + rv[name] = dispatch(a.type or v.__class__)(v) + return rv + + def unstructure_attrs_astuple(self, obj: Any) -> Tuple[Any, ...]: + """Our version of `attrs.astuple`, so we can call back to us.""" + attrs = fields(obj.__class__) + dispatch = self._unstructure_func.dispatch + res = list() + for a in attrs: + name = a.name + v = getattr(obj, name) + res.append(dispatch(a.type or v.__class__)(v)) + return tuple(res) + + def _unstructure_enum(self, obj: Enum) -> Any: + """Convert an enum to its value.""" + return obj.value + + @staticmethod + def _unstructure_identity(obj: T) -> T: + """Just pass it through.""" + return obj + + def _unstructure_seq(self, seq: Sequence[T]) -> Sequence[T]: + """Convert a sequence to primitive equivalents.""" + # We can reuse the sequence class, so tuples stay tuples. + dispatch = self._unstructure_func.dispatch + return seq.__class__(dispatch(e.__class__)(e) for e in seq) + + def _unstructure_mapping(self, mapping: Mapping[T, V]) -> Mapping[T, V]: + """Convert a mapping of attr classes to primitive equivalents.""" + + # We can reuse the mapping class, so dicts stay dicts and OrderedDicts + # stay OrderedDicts. + dispatch = self._unstructure_func.dispatch + return mapping.__class__( + (dispatch(k.__class__)(k), dispatch(v.__class__)(v)) + for k, v in mapping.items() + ) + + # note: Use UnionType when 3.11 is released as + # the behaviour of @final is changed. This would + # affect how we can support UnionType in ._compat.py + def _unstructure_union(self, obj: Any) -> Any: + """ + Unstructure an object as a union. + + By default, just unstructures the instance. 
+ """ + return self._unstructure_func.dispatch(obj.__class__)(obj) + + # Python primitives to classes. + + @staticmethod + def _structure_error(_, cl: Type) -> NoReturn: + """At the bottom of the condition stack, we explode if we can't handle it.""" + msg = "Unsupported type: {0!r}. Register a structure hook for " "it.".format(cl) + raise StructureHandlerNotFoundError(msg, type_=cl) + + def _gen_structure_generic(self, cl: Type[T]) -> DictStructureFn[T]: + """Create and return a hook for structuring generics.""" + fn = make_dict_structure_fn( + cl, self, _cattrs_prefer_attrib_converters=self._prefer_attrib_converters + ) + return fn + + def _gen_attrs_union_structure( + self, cl: Any + ) -> Callable[[Any, Type[T]], Optional[Type[T]]]: + """Generate a structuring function for a union of attrs classes (and maybe None).""" + dis_fn = self._get_dis_func(cl) + has_none = NoneType in cl.__args__ + + if has_none: + + def structure_attrs_union(obj, _): + if obj is None: + return None + return self.structure(obj, dis_fn(obj)) + + else: + + def structure_attrs_union(obj, _): + return self.structure(obj, dis_fn(obj)) + + return structure_attrs_union + + @staticmethod + def _structure_call(obj: Any, cl: Type[T]) -> Any: + """Just call ``cl`` with the given ``obj``. + + This is just an optimization on the ``_structure_default`` case, when + we know we can skip the ``if`` s. Use for ``str``, ``bytes``, ``enum``, + etc. 
+ """ + return cl(obj) + + @staticmethod + def _structure_simple_literal(val, type): + if val not in type.__args__: + raise Exception(f"{val} not in literal {type}") + return val + + @staticmethod + def _structure_enum_literal(val, type): + vals = {(x.value if isinstance(x, Enum) else x): x for x in type.__args__} + try: + return vals[val] + except KeyError: + raise Exception(f"{val} not in literal {type}") from None + + def _structure_newtype(self, val, type): + base = get_newtype_base(type) + return self._structure_func.dispatch(base)(val, base) + + # Attrs classes. + + def structure_attrs_fromtuple(self, obj: Tuple[Any, ...], cl: Type[T]) -> T: + """Load an attrs class from a sequence (tuple).""" + conv_obj = [] # A list of converter parameters. + for a, value in zip(fields(cl), obj): + # We detect the type by the metadata. + converted = self._structure_attribute(a, value) + conv_obj.append(converted) + + return cl(*conv_obj) + + def _structure_attribute(self, a: Union[Attribute, Field], value: Any) -> Any: + """Handle an individual attrs attribute.""" + type_ = a.type + attrib_converter = getattr(a, "converter", None) + if self._prefer_attrib_converters and attrib_converter: + # A attrib converter is defined on this attribute, and prefer_attrib_converters is set + # to give these priority over registered structure hooks. So, pass through the raw + # value, which attrs will flow into the converter + return value + if type_ is None: + # No type metadata. + return value + + try: + return self._structure_func.dispatch(type_)(value, type_) + except StructureHandlerNotFoundError: + if attrib_converter: + # Return the original value and fallback to using an attrib converter. + return value + else: + raise + + def structure_attrs_fromdict(self, obj: Mapping[str, Any], cl: Type[T]) -> T: + """Instantiate an attrs class from a mapping (dict).""" + # For public use. + + conv_obj = {} # Start with a fresh dict, to ignore extra keys. 
+ for a in fields(cl): + name = a.name + + try: + val = obj[name] + except KeyError: + continue + + if name[0] == "_": + name = name[1:] + + conv_obj[name] = self._structure_attribute(a, val) + + return cl(**conv_obj) + + def _structure_list(self, obj: Iterable[T], cl: Any) -> List[T]: + """Convert an iterable to a potentially generic list.""" + if is_bare(cl) or cl.__args__[0] is Any: + res = [e for e in obj] + else: + elem_type = cl.__args__[0] + handler = self._structure_func.dispatch(elem_type) + if self.detailed_validation: + errors = [] + res = [] + ix = 0 # Avoid `enumerate` for performance. + for e in obj: + try: + res.append(handler(e, elem_type)) + except Exception as e: + msg = f"Structuring {cl} @ index {ix}" + e.__notes__ = getattr(e, "__notes__", []) + [msg] + errors.append(e) + finally: + ix += 1 + if errors: + raise IterableValidationError( + f"While structuring {cl!r}", errors, cl + ) + else: + res = [handler(e, elem_type) for e in obj] + return res + + def _structure_set( + self, obj: Iterable[T], cl: Any, structure_to: type = set + ) -> Set[T]: + """Convert an iterable into a potentially generic set.""" + if is_bare(cl) or cl.__args__[0] is Any: + return structure_to(obj) + elem_type = cl.__args__[0] + handler = self._structure_func.dispatch(elem_type) + if self.detailed_validation: + errors = [] + res = set() + for e in obj: + try: + res.add(handler(e, elem_type)) + except Exception as exc: + msg = f"Structuring {structure_to.__name__} @ element {e!r}" + exc.__notes__ = getattr(e, "__notes__", []) + [msg] + errors.append(exc) + if errors: + raise IterableValidationError(f"While structuring {cl!r}", errors, cl) + return res if structure_to is set else structure_to(res) + elif structure_to is set: + return {handler(e, elem_type) for e in obj} + else: + return structure_to([handler(e, elem_type) for e in obj]) + + def _structure_frozenset( + self, obj: Iterable[T], cl: Any + ) -> FrozenSetSubscriptable[T]: + """Convert an iterable into a 
potentially generic frozenset.""" + return self._structure_set(obj, cl, structure_to=frozenset) + + def _structure_dict(self, obj: Mapping[T, V], cl: Any) -> Dict[T, V]: + """Convert a mapping into a potentially generic dict.""" + if is_bare(cl) or cl.__args__ == (Any, Any): + return dict(obj) + else: + key_type, val_type = cl.__args__ + if key_type is Any: + val_conv = self._structure_func.dispatch(val_type) + return {k: val_conv(v, val_type) for k, v in obj.items()} + elif val_type is Any: + key_conv = self._structure_func.dispatch(key_type) + return {key_conv(k, key_type): v for k, v in obj.items()} + else: + key_conv = self._structure_func.dispatch(key_type) + val_conv = self._structure_func.dispatch(val_type) + return { + key_conv(k, key_type): val_conv(v, val_type) for k, v in obj.items() + } + + def _structure_optional(self, obj, union): + if obj is None: + return None + union_params = union.__args__ + other = union_params[0] if union_params[1] is NoneType else union_params[1] + # We can't actually have a Union of a Union, so this is safe. + return self._structure_func.dispatch(other)(obj, other) + + def _structure_union(self, obj, union): + """Deal with structuring a union.""" + handler = self._union_struct_registry[union] + return handler(obj, union) + + def _structure_tuple(self, obj: Any, tup: Type[T]) -> T: + """Deal with structuring into a tuple.""" + if tup in (Tuple, tuple): + tup_params = None + else: + tup_params = tup.__args__ + has_ellipsis = tup_params and tup_params[-1] is Ellipsis + if tup_params is None or (has_ellipsis and tup_params[0] is Any): + # Just a Tuple. (No generic information.) + return tuple(obj) + if has_ellipsis: + # We're dealing with a homogenous tuple, Tuple[int, ...] 
+ tup_type = tup_params[0] + conv = self._structure_func.dispatch(tup_type) + if self.detailed_validation: + errors = [] + res = [] + for ix, e in enumerate(obj): + try: + res.append(conv(e, tup_type)) + except Exception as exc: + msg = f"Structuring {tup} @ index {ix}" + exc.__notes__ = getattr(e, "__notes__", []) + [msg] + errors.append(exc) + if errors: + raise IterableValidationError( + f"While structuring {tup!r}", errors, tup + ) + return tuple(res) + else: + return tuple(conv(e, tup_type) for e in obj) + else: + # We're dealing with a heterogenous tuple. + exp_len = len(tup_params) + try: + len_obj = len(obj) + except TypeError: + pass # most likely an unsized iterator, eg generator + else: + if len_obj > exp_len: + exp_len = len_obj + if self.detailed_validation: + errors = [] + res = [] + for ix, (t, e) in enumerate(zip(tup_params, obj)): + try: + conv = self._structure_func.dispatch(t) + res.append(conv(e, t)) + except Exception as exc: + msg = f"Structuring {tup} @ index {ix}" + exc.__notes__ = getattr(e, "__notes__", []) + [msg] + errors.append(exc) + if len(res) < exp_len: + problem = "Not enough" if len(res) < len(tup_params) else "Too many" + exc = ValueError( + f"{problem} values in {obj!r} to structure as {tup!r}" + ) + msg = f"Structuring {tup}" + exc.__notes__ = getattr(e, "__notes__", []) + [msg] + errors.append(exc) + if errors: + raise IterableValidationError( + f"While structuring {tup!r}", errors, tup + ) + return tuple(res) + else: + res = tuple( + [ + self._structure_func.dispatch(t)(e, t) + for t, e in zip(tup_params, obj) + ] + ) + if len(res) < exp_len: + problem = "Not enough" if len(res) < len(tup_params) else "Too many" + raise ValueError( + f"{problem} values in {obj!r} to structure as {tup!r}" + ) + return res + + @staticmethod + def _get_dis_func(union) -> Callable[..., Type]: + """Fetch or try creating a disambiguation function for a union.""" + union_types = union.__args__ + if NoneType in union_types: # type: ignore + # We 
support unions of attrs classes and NoneType higher in the + # logic. + union_types = tuple( + e for e in union_types if e is not NoneType # type: ignore + ) + + if not all(has(get_origin(e) or e) for e in union_types): + raise StructureHandlerNotFoundError( + "Only unions of attrs classes supported " + "currently. Register a loads hook manually.", + type_=union, + ) + return create_uniq_field_dis_func(*union_types) + + def __deepcopy__(self, _) -> "BaseConverter": + return self.copy() + + def copy( + self, + dict_factory: Optional[Callable[[], Any]] = None, + unstruct_strat: Optional[UnstructureStrategy] = None, + prefer_attrib_converters: Optional[bool] = None, + detailed_validation: Optional[bool] = None, + ) -> "BaseConverter": + res = self.__class__( + dict_factory if dict_factory is not None else self._dict_factory, + unstruct_strat + if unstruct_strat is not None + else ( + UnstructureStrategy.AS_DICT + if self._unstructure_attrs == self.unstructure_attrs_asdict + else UnstructureStrategy.AS_TUPLE + ), + prefer_attrib_converters + if prefer_attrib_converters is not None + else self._prefer_attrib_converters, + detailed_validation + if detailed_validation is not None + else self.detailed_validation, + ) + + self._unstructure_func.copy_to(res._unstructure_func) + self._structure_func.copy_to(res._structure_func) + + return res + + +class Converter(BaseConverter): + """A converter which generates specialized un/structuring functions.""" + + __slots__ = ( + "omit_if_default", + "forbid_extra_keys", + "type_overrides", + "_unstruct_collection_overrides", + "_struct_copy_skip", + "_unstruct_copy_skip", + ) + + def __init__( + self, + dict_factory: Callable[[], Any] = dict, + unstruct_strat: UnstructureStrategy = UnstructureStrategy.AS_DICT, + omit_if_default: bool = False, + forbid_extra_keys: bool = False, + type_overrides: Mapping[Type, AttributeOverride] = {}, + unstruct_collection_overrides: Mapping[Type, Callable] = {}, + prefer_attrib_converters: bool = 
False, + detailed_validation: bool = True, + ): + super().__init__( + dict_factory=dict_factory, + unstruct_strat=unstruct_strat, + prefer_attrib_converters=prefer_attrib_converters, + detailed_validation=detailed_validation, + ) + self.omit_if_default = omit_if_default + self.forbid_extra_keys = forbid_extra_keys + self.type_overrides = dict(type_overrides) + + unstruct_collection_overrides = { + get_origin(k) or k: v for k, v in unstruct_collection_overrides.items() + } + + self._unstruct_collection_overrides = unstruct_collection_overrides + + # Do a little post-processing magic to make things easier for users. + co = unstruct_collection_overrides + + # abc.Set overrides, if defined, apply to abc.MutableSets and sets + if OriginAbstractSet in co: + if OriginMutableSet not in co: + co[OriginMutableSet] = co[OriginAbstractSet] + co[AbcMutableSet] = co[OriginAbstractSet] # For 3.7/3.8 compatibility. + if FrozenSetSubscriptable not in co: + co[FrozenSetSubscriptable] = co[OriginAbstractSet] + + # abc.MutableSet overrrides, if defined, apply to sets + if OriginMutableSet in co: + if set not in co: + co[set] = co[OriginMutableSet] + + if FrozenSetSubscriptable in co: + co[frozenset] = co[FrozenSetSubscriptable] # For 3.7/3.8 compatibility. 
+ + # abc.Sequence overrides, if defined, can apply to MutableSequences, lists and tuples + if Sequence in co: + if MutableSequence not in co: + co[MutableSequence] = co[Sequence] + if tuple not in co: + co[tuple] = co[Sequence] + + # abc.MutableSequence overrides, if defined, can apply to lists + if MutableSequence in co: + if list not in co: + co[list] = co[MutableSequence] + + # abc.Mapping overrides, if defined, can apply to MutableMappings + if Mapping in co: + if MutableMapping not in co: + co[MutableMapping] = co[Mapping] + + # abc.MutableMapping overrides, if defined, can apply to dicts + if MutableMapping in co: + if dict not in co: + co[dict] = co[MutableMapping] + + # builtins.dict overrides, if defined, can apply to counters + if dict in co: + if Counter not in co: + co[Counter] = co[dict] + + if unstruct_strat is UnstructureStrategy.AS_DICT: + # Override the attrs handler. + self.register_unstructure_hook_factory( + has_with_generic, self.gen_unstructure_attrs_fromdict + ) + self.register_structure_hook_factory( + has_with_generic, self.gen_structure_attrs_fromdict + ) + self.register_unstructure_hook_factory( + is_annotated, self.gen_unstructure_annotated + ) + self.register_unstructure_hook_factory( + is_hetero_tuple, self.gen_unstructure_hetero_tuple + ) + self.register_unstructure_hook_factory( + is_sequence, self.gen_unstructure_iterable + ) + self.register_unstructure_hook_factory(is_mapping, self.gen_unstructure_mapping) + self.register_unstructure_hook_factory( + is_mutable_set, + lambda cl: self.gen_unstructure_iterable(cl, unstructure_to=set), + ) + self.register_unstructure_hook_factory( + is_frozenset, + lambda cl: self.gen_unstructure_iterable(cl, unstructure_to=frozenset), + ) + self.register_unstructure_hook_factory( + lambda t: get_newtype_base(t) is not None, + lambda t: self._unstructure_func.dispatch(get_newtype_base(t)), + ) + self.register_structure_hook_factory(is_annotated, self.gen_structure_annotated) + 
self.register_structure_hook_factory(is_mapping, self.gen_structure_mapping) + self.register_structure_hook_factory(is_counter, self.gen_structure_counter) + self.register_structure_hook_factory( + lambda t: get_newtype_base(t) is not None, self.get_structure_newtype + ) + + # We keep these so we can more correctly copy the hooks. + self._struct_copy_skip = self._structure_func.get_num_fns() + self._unstruct_copy_skip = self._unstructure_func.get_num_fns() + + def get_structure_newtype(self, type: Type[T]) -> Callable[[Any, Any], T]: + base = get_newtype_base(type) + handler = self._structure_func.dispatch(base) + return lambda v, _: handler(v, base) + + def gen_unstructure_annotated(self, type): + origin = type.__origin__ + h = self._unstructure_func.dispatch(origin) + return h + + def gen_structure_annotated(self, type): + origin = type.__origin__ + h = self._structure_func.dispatch(origin) + return h + + def gen_unstructure_attrs_fromdict( + self, cl: Type[T] + ) -> Callable[[T], Dict[str, Any]]: + origin = get_origin(cl) + attribs = fields(origin or cl) + if attrs_has(cl) and any(isinstance(a.type, str) for a in attribs): + # PEP 563 annotations - need to be resolved. + resolve_types(cl) + attrib_overrides = { + a.name: self.type_overrides[a.type] + for a in attribs + if a.type in self.type_overrides + } + + h = make_dict_unstructure_fn( + cl, self, _cattrs_omit_if_default=self.omit_if_default, **attrib_overrides + ) + return h + + def gen_structure_attrs_fromdict( + self, cl: Type[T] + ) -> Callable[[Mapping[str, Any], Any], T]: + attribs = fields(get_origin(cl) if is_generic(cl) else cl) + if attrs_has(cl) and any(isinstance(a.type, str) for a in attribs): + # PEP 563 annotations - need to be resolved. 
+ resolve_types(cl) + attrib_overrides = { + a.name: self.type_overrides[a.type] + for a in attribs + if a.type in self.type_overrides + } + h = make_dict_structure_fn( + cl, + self, + _cattrs_forbid_extra_keys=self.forbid_extra_keys, + _cattrs_prefer_attrib_converters=self._prefer_attrib_converters, + _cattrs_detailed_validation=self.detailed_validation, + **attrib_overrides, + ) + # only direct dispatch so that subclasses get separately generated + return h + + def gen_unstructure_iterable( + self, cl: Any, unstructure_to: Any = None + ) -> IterableUnstructureFn: + unstructure_to = self._unstruct_collection_overrides.get( + get_origin(cl) or cl, unstructure_to or list + ) + h = make_iterable_unstructure_fn(cl, self, unstructure_to=unstructure_to) + self._unstructure_func.register_cls_list([(cl, h)], direct=True) + return h + + def gen_unstructure_hetero_tuple( + self, cl: Any, unstructure_to: Any = None + ) -> HeteroTupleUnstructureFn: + unstructure_to = self._unstruct_collection_overrides.get( + get_origin(cl) or cl, unstructure_to or list + ) + h = make_hetero_tuple_unstructure_fn(cl, self, unstructure_to=unstructure_to) + self._unstructure_func.register_cls_list([(cl, h)], direct=True) + return h + + def gen_unstructure_mapping( + self, + cl: Any, + unstructure_to: Any = None, + key_handler: Optional[Callable[[Any, Optional[Any]], Any]] = None, + ) -> MappingUnstructureFn: + unstructure_to = self._unstruct_collection_overrides.get( + get_origin(cl) or cl, unstructure_to or dict + ) + h = make_mapping_unstructure_fn( + cl, self, unstructure_to=unstructure_to, key_handler=key_handler + ) + self._unstructure_func.register_cls_list([(cl, h)], direct=True) + return h + + def gen_structure_counter(self, cl: Any) -> MappingStructureFn[T]: + h = make_mapping_structure_fn( + cl, + self, + structure_to=Counter, + val_type=int, + detailed_validation=self.detailed_validation, + ) + self._structure_func.register_cls_list([(cl, h)], direct=True) + return h + + def 
gen_structure_mapping(self, cl: Any) -> MappingStructureFn[T]: + h = make_mapping_structure_fn( + cl, self, detailed_validation=self.detailed_validation + ) + self._structure_func.register_cls_list([(cl, h)], direct=True) + return h + + def copy( + self, + dict_factory: Optional[Callable[[], Any]] = None, + unstruct_strat: Optional[UnstructureStrategy] = None, + omit_if_default: Optional[bool] = None, + forbid_extra_keys: Optional[bool] = None, + type_overrides: Optional[Mapping[Type, AttributeOverride]] = None, + unstruct_collection_overrides: Optional[Mapping[Type, Callable]] = None, + prefer_attrib_converters: Optional[bool] = None, + detailed_validation: Optional[bool] = None, + ) -> "Converter": + res = self.__class__( + dict_factory if dict_factory is not None else self._dict_factory, + unstruct_strat + if unstruct_strat is not None + else ( + UnstructureStrategy.AS_DICT + if self._unstructure_attrs == self.unstructure_attrs_asdict + else UnstructureStrategy.AS_TUPLE + ), + omit_if_default if omit_if_default is not None else self.omit_if_default, + forbid_extra_keys + if forbid_extra_keys is not None + else self.forbid_extra_keys, + type_overrides if type_overrides is not None else self.type_overrides, + unstruct_collection_overrides + if unstruct_collection_overrides is not None + else self._unstruct_collection_overrides, + prefer_attrib_converters + if prefer_attrib_converters is not None + else self._prefer_attrib_converters, + detailed_validation + if detailed_validation is not None + else self.detailed_validation, + ) + + self._unstructure_func.copy_to( + res._unstructure_func, skip=self._unstruct_copy_skip + ) + self._structure_func.copy_to(res._structure_func, skip=self._struct_copy_skip) + + return res + + +GenConverter = Converter diff --git a/pythonFiles/lib/jedilsp/cattrs/disambiguators.py b/pythonFiles/lib/jedilsp/cattrs/disambiguators.py new file mode 100644 index 0000000000000..2b36fc542b380 --- /dev/null +++ 
b/pythonFiles/lib/jedilsp/cattrs/disambiguators.py @@ -0,0 +1,59 @@ +"""Utilities for union (sum type) disambiguation.""" +from collections import OrderedDict +from functools import reduce +from operator import or_ +from typing import Any, Callable, Dict, Mapping, Optional, Type + +from attr import NOTHING, fields + +from cattrs._compat import get_origin + + +def create_uniq_field_dis_func( + *classes: Type[Any], +) -> Callable[[Mapping[Any, Any]], Optional[Type[Any]]]: + """Given attr classes, generate a disambiguation function. + + The function is based on unique fields.""" + if len(classes) < 2: + raise ValueError("At least two classes required.") + cls_and_attrs = [ + (cl, set(at.name for at in fields(get_origin(cl) or cl))) for cl in classes + ] + if len([attrs for _, attrs in cls_and_attrs if len(attrs) == 0]) > 1: + raise ValueError("At least two classes have no attributes.") + # TODO: Deal with a single class having no required attrs. + # For each class, attempt to generate a single unique required field. + uniq_attrs_dict: Dict[str, Type] = OrderedDict() + cls_and_attrs.sort(key=lambda c_a: -len(c_a[1])) + + fallback = None # If none match, try this. + + for i, (cl, cl_reqs) in enumerate(cls_and_attrs): + other_classes = cls_and_attrs[i + 1 :] + if other_classes: + other_reqs = reduce(or_, (c_a[1] for c_a in other_classes)) + uniq = cl_reqs - other_reqs + if not uniq: + m = "{} has no usable unique attributes.".format(cl) + raise ValueError(m) + # We need a unique attribute with no default. 
+ cl_fields = fields(get_origin(cl) or cl) + for attr_name in uniq: + if getattr(cl_fields, attr_name).default is NOTHING: + break + else: + raise ValueError(f"{cl} has no usable non-default attributes.") + uniq_attrs_dict[attr_name] = cl + else: + fallback = cl + + def dis_func(data: Mapping[Any, Any]) -> Optional[Type]: + if not isinstance(data, Mapping): + raise ValueError("Only input mappings are supported.") + for k, v in uniq_attrs_dict.items(): + if k in data: + return v + return fallback + + return dis_func diff --git a/pythonFiles/lib/jedilsp/cattrs/dispatch.py b/pythonFiles/lib/jedilsp/cattrs/dispatch.py new file mode 100644 index 0000000000000..debd84f27d780 --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattrs/dispatch.py @@ -0,0 +1,142 @@ +from functools import lru_cache, singledispatch +from typing import Any, Callable, List, Tuple, Union + +import attr + +from .errors import StructureHandlerNotFoundError + + +@attr.s +class _DispatchNotFound: + """A dummy object to help signify a dispatch not found.""" + + pass + + +class MultiStrategyDispatch: + """ + MultiStrategyDispatch uses a combination of exact-match dispatch, + singledispatch, and FunctionDispatch. 
+ """ + + __slots__ = ( + "_direct_dispatch", + "_function_dispatch", + "_single_dispatch", + "_generators", + "dispatch", + ) + + def __init__(self, fallback_func): + self._direct_dispatch = {} + self._function_dispatch = FunctionDispatch() + self._function_dispatch.register(lambda _: True, fallback_func) + self._single_dispatch = singledispatch(_DispatchNotFound) + self.dispatch = lru_cache(maxsize=None)(self._dispatch) + + def _dispatch(self, cl): + try: + dispatch = self._single_dispatch.dispatch(cl) + if dispatch is not _DispatchNotFound: + return dispatch + except Exception: + pass + + direct_dispatch = self._direct_dispatch.get(cl) + if direct_dispatch is not None: + return direct_dispatch + + return self._function_dispatch.dispatch(cl) + + def register_cls_list(self, cls_and_handler, direct: bool = False) -> None: + """Register a class to direct or singledispatch.""" + for cls, handler in cls_and_handler: + if direct: + self._direct_dispatch[cls] = handler + else: + self._single_dispatch.register(cls, handler) + self.clear_direct() + self.dispatch.cache_clear() + + def register_func_list( + self, + pred_and_handler: List[ + Union[ + Tuple[Callable[[Any], bool], Any], + Tuple[Callable[[Any], bool], Any, bool], + ] + ], + ): + """ + Register a predicate function to determine if the handle + should be used for the type. 
+ """ + for tup in pred_and_handler: + if len(tup) == 2: + func, handler = tup + self._function_dispatch.register(func, handler) + else: + func, handler, is_gen = tup + self._function_dispatch.register(func, handler, is_generator=is_gen) + self.clear_direct() + self.dispatch.cache_clear() + + def clear_direct(self): + """Clear the direct dispatch.""" + self._direct_dispatch.clear() + + def clear_cache(self): + """Clear all caches.""" + self._direct_dispatch.clear() + self.dispatch.cache_clear() + + def get_num_fns(self) -> int: + return self._function_dispatch.get_num_fns() + + def copy_to(self, other: "MultiStrategyDispatch", skip: int = 0): + self._function_dispatch.copy_to(other._function_dispatch, skip=skip) + for cls, fn in self._single_dispatch.registry.items(): + other._single_dispatch.register(cls, fn) + + +@attr.s(slots=True) +class FunctionDispatch: + """ + FunctionDispatch is similar to functools.singledispatch, but + instead dispatches based on functions that take the type of the + first argument in the method, and return True or False. + + objects that help determine dispatch should be instantiated objects. + """ + + _handler_pairs: list = attr.ib(factory=list) + + def register(self, can_handle: Callable[[Any], bool], func, is_generator=False): + self._handler_pairs.insert(0, (can_handle, func, is_generator)) + + def dispatch(self, typ): + """ + returns the appropriate handler, for the object passed. + """ + for can_handle, handler, is_generator in self._handler_pairs: + # can handle could raise an exception here + # such as issubclass being called on an instance. + # it's easier to just ignore that case. 
+ try: + ch = can_handle(typ) + except Exception: + continue + if ch: + if is_generator: + return handler(typ) + else: + return handler + raise StructureHandlerNotFoundError( + f"unable to find handler for {typ}", type_=typ + ) + + def get_num_fns(self) -> int: + return len(self._handler_pairs) + + def copy_to(self, other: "FunctionDispatch", skip: int = 0): + other._handler_pairs.extend(self._handler_pairs[skip:]) diff --git a/pythonFiles/lib/jedilsp/cattrs/errors.py b/pythonFiles/lib/jedilsp/cattrs/errors.py new file mode 100644 index 0000000000000..3ce67a2b5ff0a --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattrs/errors.py @@ -0,0 +1,57 @@ +from typing import Optional, Set, Type + +from cattrs._compat import ExceptionGroup + + +class StructureHandlerNotFoundError(Exception): + """Error raised when structuring cannot find a handler for converting inputs into :attr:`type_`.""" + + def __init__(self, message: str, type_: Type) -> None: + super().__init__(message) + self.type_ = type_ + + +class BaseValidationError(ExceptionGroup): + cl: Type + + def __new__(cls, message, excs, cl: Type): + obj = super().__new__(cls, message, excs) + obj.cl = cl + return obj + + def derive(self, excs): + return ClassValidationError(self.message, excs, self.cl) + + +class IterableValidationError(BaseValidationError): + """Raised when structuring an iterable.""" + + pass + + +class ClassValidationError(BaseValidationError): + """Raised when validating a class if any attributes are invalid.""" + + pass + + +class ForbiddenExtraKeysError(Exception): + """Raised when `forbid_extra_keys` is activated and such extra keys are detected during structuring. + + The attribute `extra_fields` is a sequence of those extra keys, which were the cause of this error, + and `cl` is the class which was structured with those extra keys. 
+ """ + + def __init__( + self, message: Optional[str], cl: Type, extra_fields: Set[str] + ) -> None: + self.cl = cl + self.extra_fields = extra_fields + + msg = ( + message + if message + else f"Extra fields in constructor for {cl.__name__}: {', '.join(extra_fields)}" + ) + + super().__init__(msg) diff --git a/pythonFiles/lib/jedilsp/cattrs/gen.py b/pythonFiles/lib/jedilsp/cattrs/gen.py new file mode 100644 index 0000000000000..23601f4730ffc --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattrs/gen.py @@ -0,0 +1,793 @@ +import linecache +import re +import uuid +from dataclasses import is_dataclass +from threading import local +from typing import ( + TYPE_CHECKING, + Any, + Callable, + Dict, + Iterable, + Mapping, + Optional, + Tuple, + Type, + TypeVar, +) + +import attr +from attr import NOTHING, frozen, resolve_types + +from cattrs.errors import ( + ClassValidationError, + ForbiddenExtraKeysError, + IterableValidationError, + StructureHandlerNotFoundError, +) + +from ._compat import ( + adapted_fields, + get_args, + get_origin, + is_annotated, + is_bare, + is_generic, +) +from ._generics import deep_copy_with + +if TYPE_CHECKING: # pragma: no cover + from cattr.converters import BaseConverter + + +@frozen +class AttributeOverride: + omit_if_default: Optional[bool] = None + rename: Optional[str] = None + omit: bool = False # Omit the field completely. + + +def override( + omit_if_default: Optional[bool] = None, + rename: Optional[str] = None, + omit: bool = False, +): + return AttributeOverride(omit_if_default=omit_if_default, rename=rename, omit=omit) + + +_neutral = AttributeOverride() +_already_generating = local() +T = TypeVar("T") + + +def make_dict_unstructure_fn( + cl: Type[T], + converter: "BaseConverter", + _cattrs_omit_if_default: bool = False, + _cattrs_use_linecache: bool = True, + **kwargs: AttributeOverride, +) -> Callable[[T], Dict[str, Any]]: + """ + Generate a specialized dict unstructuring function for an attrs class or a + dataclass. 
+ """ + origin = get_origin(cl) + attrs = adapted_fields(origin or cl) # type: ignore + + if any(isinstance(a.type, str) for a in attrs): + # PEP 563 annotations - need to be resolved. + resolve_types(cl) + + mapping = {} + if is_generic(cl): + mapping = _generate_mapping(cl, mapping) + + for base in getattr(origin, "__orig_bases__", ()): + if is_generic(base) and not str(base).startswith("typing.Generic"): + mapping = _generate_mapping(base, mapping) + break + cl = origin + + cl_name = cl.__name__ + fn_name = "unstructure_" + cl_name + globs = {} + lines = [] + invocation_lines = [] + internal_arg_parts = {} + + # We keep track of what we're generating to help with recursive + # class graphs. + try: + working_set = _already_generating.working_set + except AttributeError: + working_set = set() + _already_generating.working_set = working_set + if cl in working_set: + raise RecursionError() + else: + working_set.add(cl) + + try: + for a in attrs: + attr_name = a.name + override = kwargs.pop(attr_name, _neutral) + if override.omit: + continue + kn = attr_name if override.rename is None else override.rename + d = a.default + + # For each attribute, we try resolving the type here and now. + # If a type is manually overwritten, this function should be + # regenerated. 
+ handler = None + if a.type is not None: + t = a.type + if isinstance(t, TypeVar): + if t.__name__ in mapping: + t = mapping[t.__name__] + else: + handler = converter.unstructure + elif is_generic(t) and not is_bare(t) and not is_annotated(t): + t = deep_copy_with(t, mapping) + + if handler is None: + try: + handler = converter._unstructure_func.dispatch(t) + except RecursionError: + # There's a circular reference somewhere down the line + handler = converter.unstructure + else: + handler = converter.unstructure + + is_identity = handler == converter._unstructure_identity + + if not is_identity: + unstruct_handler_name = f"__c_unstr_{attr_name}" + globs[unstruct_handler_name] = handler + internal_arg_parts[unstruct_handler_name] = handler + invoke = f"{unstruct_handler_name}(instance.{attr_name})" + else: + invoke = f"instance.{attr_name}" + + if d is not attr.NOTHING and ( + (_cattrs_omit_if_default and override.omit_if_default is not False) + or override.omit_if_default + ): + def_name = f"__c_def_{attr_name}" + + if isinstance(d, attr.Factory): + globs[def_name] = d.factory + internal_arg_parts[def_name] = d.factory + if d.takes_self: + lines.append( + f" if instance.{attr_name} != {def_name}(instance):" + ) + else: + lines.append(f" if instance.{attr_name} != {def_name}():") + lines.append(f" res['{kn}'] = {invoke}") + else: + globs[def_name] = d + internal_arg_parts[def_name] = d + lines.append(f" if instance.{attr_name} != {def_name}:") + lines.append(f" res['{kn}'] = {invoke}") + + else: + # No default or no override. 
+ invocation_lines.append(f"'{kn}': {invoke},") + + internal_arg_line = ", ".join([f"{i}={i}" for i in internal_arg_parts]) + if internal_arg_line: + internal_arg_line = f", {internal_arg_line}" + for k, v in internal_arg_parts.items(): + globs[k] = v + + total_lines = ( + [f"def {fn_name}(instance{internal_arg_line}):"] + + [" res = {"] + + [f" {line}" for line in invocation_lines] + + [" }"] + + lines + + [" return res"] + ) + script = "\n".join(total_lines) + + fname = _generate_unique_filename( + cl, "unstructure", reserve=_cattrs_use_linecache + ) + + eval(compile(script, fname, "exec"), globs) + + fn = globs[fn_name] + if _cattrs_use_linecache: + linecache.cache[fname] = len(script), None, total_lines, fname + finally: + working_set.remove(cl) + + return fn + + +def _generate_mapping(cl: Type, old_mapping: Dict[str, type]) -> Dict[str, type]: + mapping = {} + + # To handle the cases where classes in the typing module are using + # the GenericAlias structure but aren’t a Generic and hence + # end up in this function but do not have an `__parameters__` + # attribute. These classes are interface types, for example + # `typing.Hashable`. 
+ parameters = getattr(get_origin(cl), "__parameters__", None) + if parameters is None: + return old_mapping + + for p, t in zip(parameters, get_args(cl)): + if isinstance(t, TypeVar): + continue + mapping[p.__name__] = t + + if not mapping: + return old_mapping + + return mapping + + +DictStructureFn = Callable[[Mapping[str, Any], Any], T] + + +def make_dict_structure_fn( + cl: Type[T], + converter: "BaseConverter", + _cattrs_forbid_extra_keys: bool = False, + _cattrs_use_linecache: bool = True, + _cattrs_prefer_attrib_converters: bool = False, + _cattrs_detailed_validation: bool = True, + **kwargs: AttributeOverride, +) -> DictStructureFn[T]: + """Generate a specialized dict structuring function for an attrs class.""" + + mapping = {} + if is_generic(cl): + base = get_origin(cl) + mapping = _generate_mapping(cl, mapping) + cl = base + + for base in getattr(cl, "__orig_bases__", ()): + if is_generic(base) and not str(base).startswith("typing.Generic"): + mapping = _generate_mapping(base, mapping) + break + + if isinstance(cl, TypeVar): + cl = mapping.get(cl.__name__, cl) + + cl_name = cl.__name__ + fn_name = "structure_" + cl_name + + # We have generic parameters and need to generate a unique name for the function + for p in getattr(cl, "__parameters__", ()): + # This is nasty, I am not sure how best to handle `typing.List[str]` or `TClass[int, int]` as a parameter type here + try: + name_base = mapping[p.__name__] + except KeyError: + raise StructureHandlerNotFoundError( + f"Missing type for generic argument {p.__name__}, specify it when structuring.", + p, + ) from None + name = getattr(name_base, "__name__", None) or str(name_base) + name = re.sub(r"[\[\.\] ,]", "_", name) + fn_name += f"_{name}" + + internal_arg_parts = {"__cl": cl} + globs = {} + lines = [] + post_lines = [] + invocation_lines = [] + + attrs = adapted_fields(cl) + is_dc = is_dataclass(cl) + + if any(isinstance(a.type, str) for a in attrs): + # PEP 563 annotations - need to be resolved. 
+ resolve_types(cl) + + allowed_fields = set() + if _cattrs_forbid_extra_keys: + globs["__c_a"] = allowed_fields + globs["__c_feke"] = ForbiddenExtraKeysError + + if _cattrs_detailed_validation: + lines.append(" res = {}") + lines.append(" errors = []") + invocation_lines.append("**res,") + internal_arg_parts["__c_cve"] = ClassValidationError + for a in attrs: + an = a.name + override = kwargs.get(an, _neutral) + if override.omit: + continue + t = a.type + if isinstance(t, TypeVar): + t = mapping.get(t.__name__, t) + elif is_generic(t) and not is_bare(t) and not is_annotated(t): + t = deep_copy_with(t, mapping) + + # For each attribute, we try resolving the type here and now. + # If a type is manually overwritten, this function should be + # regenerated. + if a.converter is not None and _cattrs_prefer_attrib_converters: + handler = None + elif ( + a.converter is not None + and not _cattrs_prefer_attrib_converters + and t is not None + ): + handler = converter._structure_func.dispatch(t) + if handler == converter._structure_error: + handler = None + elif t is not None: + handler = converter._structure_func.dispatch(t) + else: + handler = converter.structure + + struct_handler_name = f"__c_structure_{an}" + internal_arg_parts[struct_handler_name] = handler + + ian = an if (is_dc or an[0] != "_") else an[1:] + kn = an if override.rename is None else override.rename + allowed_fields.add(kn) + i = " " + if a.default is not NOTHING: + lines.append(f"{i}if '{kn}' in o:") + i = f"{i} " + lines.append(f"{i}try:") + i = f"{i} " + if handler: + if handler == converter._structure_call: + internal_arg_parts[struct_handler_name] = t + lines.append(f"{i}res['{ian}'] = {struct_handler_name}(o['{kn}'])") + else: + type_name = f"__c_type_{an}" + internal_arg_parts[type_name] = t + lines.append( + f"{i}res['{ian}'] = {struct_handler_name}(o['{kn}'], {type_name})" + ) + else: + lines.append(f"{i}res['{ian}'] = o['{kn}']") + i = i[:-2] + lines.append(f"{i}except Exception as e:") + i = 
f"{i} " + lines.append( + f"{i}e.__notes__ = getattr(e, '__notes__', []) + [\"Structuring class {cl.__qualname__} @ attribute {an}\"]" + ) + lines.append(f"{i}errors.append(e)") + + if _cattrs_forbid_extra_keys: + post_lines += [ + " unknown_fields = set(o.keys()) - __c_a", + " if unknown_fields:", + " errors.append(__c_feke('', __cl, unknown_fields))", + ] + + post_lines.append( + f" if errors: raise __c_cve('While structuring ' + {cl.__name__!r}, errors, __cl)" + ) + instantiation_lines = ( + [" try:"] + + [" return __cl("] + + [f" {line}" for line in invocation_lines] + + [" )"] + + [ + f" except Exception as exc: raise __c_cve('While structuring ' + {cl.__name__!r}, [exc], __cl)" + ] + ) + else: + non_required = [] + # The first loop deals with required args. + for a in attrs: + an = a.name + override = kwargs.get(an, _neutral) + if override.omit: + continue + if a.default is not NOTHING: + non_required.append(a) + continue + t = a.type + if isinstance(t, TypeVar): + t = mapping.get(t.__name__, t) + elif is_generic(t) and not is_bare(t) and not is_annotated(t): + t = deep_copy_with(t, mapping) + + # For each attribute, we try resolving the type here and now. + # If a type is manually overwritten, this function should be + # regenerated. 
+ if a.converter is not None and _cattrs_prefer_attrib_converters: + handler = None + elif ( + a.converter is not None + and not _cattrs_prefer_attrib_converters + and t is not None + ): + handler = converter._structure_func.dispatch(t) + if handler == converter._structure_error: + handler = None + elif t is not None: + handler = converter._structure_func.dispatch(t) + else: + handler = converter.structure + + kn = an if override.rename is None else override.rename + allowed_fields.add(kn) + + if handler: + struct_handler_name = f"__c_structure_{an}" + internal_arg_parts[struct_handler_name] = handler + if handler == converter._structure_call: + internal_arg_parts[struct_handler_name] = t + invocation_line = f"{struct_handler_name}(o['{kn}'])," + else: + type_name = f"__c_type_{an}" + internal_arg_parts[type_name] = t + invocation_line = f"{struct_handler_name}(o['{kn}'], {type_name})," + else: + invocation_line = f"o['{kn}']," + + if a.kw_only: + ian = an if (is_dc or an[0] != "_") else an[1:] + invocation_line = f"{ian}={invocation_line}" + invocation_lines.append(invocation_line) + + # The second loop is for optional args. + if non_required: + invocation_lines.append("**res,") + lines.append(" res = {}") + + for a in non_required: + an = a.name + override = kwargs.get(an, _neutral) + t = a.type + if isinstance(t, TypeVar): + t = mapping.get(t.__name__, t) + elif is_generic(t) and not is_bare(t) and not is_annotated(t): + t = deep_copy_with(t, mapping) + + # For each attribute, we try resolving the type here and now. + # If a type is manually overwritten, this function should be + # regenerated. 
+ if a.converter is not None and _cattrs_prefer_attrib_converters: + handler = None + elif ( + a.converter is not None + and not _cattrs_prefer_attrib_converters + and t is not None + ): + handler = converter._structure_func.dispatch(t) + if handler == converter._structure_error: + handler = None + elif t is not None: + handler = converter._structure_func.dispatch(t) + else: + handler = converter.structure + + struct_handler_name = f"__c_structure_{an}" + internal_arg_parts[struct_handler_name] = handler + + ian = an if (is_dc or an[0] != "_") else an[1:] + kn = an if override.rename is None else override.rename + allowed_fields.add(kn) + post_lines.append(f" if '{kn}' in o:") + if handler: + if handler == converter._structure_call: + internal_arg_parts[struct_handler_name] = t + post_lines.append( + f" res['{ian}'] = {struct_handler_name}(o['{kn}'])" + ) + else: + type_name = f"__c_type_{an}" + internal_arg_parts[type_name] = t + post_lines.append( + f" res['{ian}'] = {struct_handler_name}(o['{kn}'], {type_name})" + ) + else: + post_lines.append(f" res['{ian}'] = o['{kn}']") + instantiation_lines = ( + [" return __cl("] + [f" {line}" for line in invocation_lines] + [" )"] + ) + + if _cattrs_forbid_extra_keys: + post_lines += [ + " unknown_fields = set(o.keys()) - __c_a", + " if unknown_fields:", + " raise __c_feke('', __cl, unknown_fields)", + ] + + # At the end, we create the function header. 
+ internal_arg_line = ", ".join([f"{i}={i}" for i in internal_arg_parts]) + for k, v in internal_arg_parts.items(): + globs[k] = v + + total_lines = ( + [f"def {fn_name}(o, _, *, {internal_arg_line}):"] + + lines + + post_lines + + instantiation_lines + ) + + fname = _generate_unique_filename(cl, "structure", reserve=_cattrs_use_linecache) + script = "\n".join(total_lines) + eval(compile(script, fname, "exec"), globs) + if _cattrs_use_linecache: + linecache.cache[fname] = len(script), None, total_lines, fname + + return globs[fn_name] + + +IterableUnstructureFn = Callable[[Iterable[Any]], Any] + + +def make_iterable_unstructure_fn( + cl: Any, converter: "BaseConverter", unstructure_to: Any = None +) -> IterableUnstructureFn: + """Generate a specialized unstructure function for an iterable.""" + handler = converter.unstructure + + fn_name = "unstructure_iterable" + + # Let's try fishing out the type args + # Unspecified tuples have `__args__` as empty tuples, so guard + # against IndexError. + if getattr(cl, "__args__", None) not in (None, ()): + type_arg = cl.__args__[0] + # We don't know how to handle the TypeVar on this level, + # so we skip doing the dispatch here. 
+ if not isinstance(type_arg, TypeVar): + handler = converter._unstructure_func.dispatch(type_arg) + + globs = {"__cattr_seq_cl": unstructure_to or cl, "__cattr_u": handler} + lines = [] + + lines.append(f"def {fn_name}(iterable):") + lines.append(" res = __cattr_seq_cl(__cattr_u(i) for i in iterable)") + + total_lines = lines + [" return res"] + + eval(compile("\n".join(total_lines), "", "exec"), globs) + + fn = globs[fn_name] + + return fn + + +HeteroTupleUnstructureFn = Callable[[Tuple[Any, ...]], Any] + + +def make_hetero_tuple_unstructure_fn( + cl: Any, converter: "BaseConverter", unstructure_to: Any = None +) -> HeteroTupleUnstructureFn: + """Generate a specialized unstructure function for a heterogeneous tuple.""" + fn_name = "unstructure_tuple" + + type_args = get_args(cl) + + # We can do the dispatch here and now. + handlers = [ + converter._unstructure_func.dispatch(type_arg) for type_arg in type_args + ] + + globs = {f"__cattr_u_{i}": h for i, h in enumerate(handlers)} + if unstructure_to is not tuple: + globs["__cattr_seq_cl"] = unstructure_to or cl + lines = [] + + lines.append(f"def {fn_name}(tup):") + if unstructure_to is not tuple: + lines.append(" res = __cattr_seq_cl((") + else: + lines.append(" res = (") + for i in range(len(handlers)): + if handlers[i] == converter._unstructure_identity: + lines.append(f" tup[{i}],") + else: + lines.append(f" __cattr_u_{i}(tup[{i}]),") + + if unstructure_to is not tuple: + lines.append(" ))") + else: + lines.append(" )") + + total_lines = lines + [" return res"] + + eval(compile("\n".join(total_lines), "", "exec"), globs) + + fn = globs[fn_name] + + return fn + + +MappingUnstructureFn = Callable[[Mapping[Any, Any]], Any] + + +def make_mapping_unstructure_fn( + cl: Any, + converter: "BaseConverter", + unstructure_to: Any = None, + key_handler: Optional[Callable[[Any, Optional[Any]], Any]] = None, +) -> MappingUnstructureFn: + """Generate a specialized unstructure function for a mapping.""" + kh = key_handler or 
converter.unstructure + val_handler = converter.unstructure + + fn_name = "unstructure_mapping" + + # Let's try fishing out the type args. + if getattr(cl, "__args__", None) is not None: + args = get_args(cl) + if len(args) == 2: + key_arg, val_arg = args + else: + # Probably a Counter + key_arg, val_arg = args, Any + # We can do the dispatch here and now. + kh = key_handler or converter._unstructure_func.dispatch(key_arg) + if kh == converter._unstructure_identity: + kh = None + + val_handler = converter._unstructure_func.dispatch(val_arg) + if val_handler == converter._unstructure_identity: + val_handler = None + + globs = { + "__cattr_mapping_cl": unstructure_to or cl, + "__cattr_k_u": kh, + "__cattr_v_u": val_handler, + } + + k_u = "__cattr_k_u(k)" if kh is not None else "k" + v_u = "__cattr_v_u(v)" if val_handler is not None else "v" + + lines = [] + + lines.append(f"def {fn_name}(mapping):") + lines.append( + f" res = __cattr_mapping_cl(({k_u}, {v_u}) for k, v in mapping.items())" + ) + + total_lines = lines + [" return res"] + + eval(compile("\n".join(total_lines), "", "exec"), globs) + + fn = globs[fn_name] + + return fn + + +MappingStructureFn = Callable[[Mapping[Any, Any], Any], T] + + +def make_mapping_structure_fn( + cl: Type[T], + converter: "BaseConverter", + structure_to: Type = dict, + key_type=NOTHING, + val_type=NOTHING, + detailed_validation: bool = True, +) -> MappingStructureFn[T]: + """Generate a specialized unstructure function for a mapping.""" + fn_name = "structure_mapping" + + globs: Dict[str, Type] = {"__cattr_mapping_cl": structure_to} + + lines = [] + lines.append(f"def {fn_name}(mapping, _):") + + # Let's try fishing out the type args. 
+ if not is_bare(cl): + args = get_args(cl) + if len(args) == 2: + key_arg_cand, val_arg_cand = args + if key_type is NOTHING: + key_type = key_arg_cand + if val_type is NOTHING: + val_type = val_arg_cand + else: + if key_type is not NOTHING and val_type is NOTHING: + (val_type,) = args + elif key_type is NOTHING and val_type is not NOTHING: + (key_type,) = args + else: + # Probably a Counter + (key_type,) = args + val_type = Any + + is_bare_dict = val_type is Any and key_type is Any + if not is_bare_dict: + # We can do the dispatch here and now. + key_handler = converter._structure_func.dispatch(key_type) + if key_handler == converter._structure_call: + key_handler = key_type + + val_handler = converter._structure_func.dispatch(val_type) + if val_handler == converter._structure_call: + val_handler = val_type + + globs["__cattr_k_t"] = key_type + globs["__cattr_v_t"] = val_type + globs["__cattr_k_s"] = key_handler + globs["__cattr_v_s"] = val_handler + k_s = ( + "__cattr_k_s(k, __cattr_k_t)" + if key_handler != key_type + else "__cattr_k_s(k)" + ) + v_s = ( + "__cattr_v_s(v, __cattr_v_t)" + if val_handler != val_type + else "__cattr_v_s(v)" + ) + else: + is_bare_dict = True + + if is_bare_dict: + # No args, it's a bare dict. 
+ lines.append(" res = dict(mapping)") + else: + if detailed_validation: + globs["enumerate"] = enumerate + globs["IterableValidationError"] = IterableValidationError + lines.append(" res = {}; errors = []") + lines.append(" for ix, (k, v) in enumerate(mapping.items()):") + lines.append(" try:") + lines.append(f" value = {v_s}") + lines.append(" except Exception as e:") + lines.append( + " e.__notes__ = getattr(e, '__notes__', []) + ['Structuring mapping value @ key ' + repr(k)]" + ) + lines.append(" errors.append(e)") + lines.append(" continue") + lines.append(" try:") + lines.append(f" key = {k_s}") + lines.append(" res[key] = value") + lines.append(" except Exception as e:") + lines.append( + " e.__notes__ = getattr(e, '__notes__', []) + ['Structuring mapping key @ key ' + repr(k)]" + ) + lines.append(" errors.append(e)") + lines.append(" if errors:") + lines.append( + f" raise IterableValidationError('While structuring ' + {repr(cl)!r}, errors, __cattr_mapping_cl)" + ) + else: + lines.append(f" res = {{{k_s}: {v_s} for k, v in mapping.items()}}") + if structure_to is not dict: + lines.append(" res = __cattr_mapping_cl(res)") + + total_lines = lines + [" return res"] + script = "\n".join(total_lines) + + eval(compile(script, "", "exec"), globs) + + fn = globs[fn_name] + + return fn + + +def _generate_unique_filename(cls: Any, func_name: str, reserve: bool = True) -> str: + """ + Create a "filename" suitable for a function being generated. + """ + unique_id = uuid.uuid4() + extra = "" + count = 1 + + while True: + unique_filename = "".format( + func_name, cls.__module__, getattr(cls, "__qualname__", cls.__name__), extra + ) + if not reserve: + return unique_filename + # To handle concurrency we essentially "reserve" our spot in + # the linecache with a dummy line. The caller can then + # set this value correctly. 
+ cache_line = (1, None, (str(unique_id),), unique_filename) + if linecache.cache.setdefault(unique_filename, cache_line) == cache_line: + return unique_filename + + # Looks like this spot is taken. Try again. + count += 1 + extra = "-{0}".format(count) diff --git a/pythonFiles/lib/jedilsp/cattrs/preconf/__init__.py b/pythonFiles/lib/jedilsp/cattrs/preconf/__init__.py new file mode 100644 index 0000000000000..760ae115ab27b --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattrs/preconf/__init__.py @@ -0,0 +1,7 @@ +from datetime import datetime + + +def validate_datetime(v, _): + if not isinstance(v, datetime): + raise Exception(f"Expected datetime, got {v}") + return v diff --git a/pythonFiles/lib/jedilsp/cattrs/preconf/bson.py b/pythonFiles/lib/jedilsp/cattrs/preconf/bson.py new file mode 100644 index 0000000000000..f6d5658285b12 --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattrs/preconf/bson.py @@ -0,0 +1,97 @@ +"""Preconfigured converters for bson.""" +from base64 import b85decode, b85encode +from datetime import datetime +from typing import Any, Type, TypeVar + +from bson import DEFAULT_CODEC_OPTIONS, CodecOptions, ObjectId, decode, encode + +from cattrs._compat import AbstractSet, is_mapping +from cattrs.gen import make_mapping_structure_fn + +from ..converters import BaseConverter, Converter +from . 
import validate_datetime + +T = TypeVar("T") + + +class Base85Bytes(bytes): + """A subclass to help with binary key encoding/decoding.""" + + +class BsonConverter(Converter): + def dumps( + self, + obj: Any, + unstructure_as=None, + check_keys: bool = False, + codec_options: CodecOptions = DEFAULT_CODEC_OPTIONS, + ) -> bytes: + return encode( + self.unstructure(obj, unstructure_as=unstructure_as), + check_keys=check_keys, + codec_options=codec_options, + ) + + def loads( + self, + data: bytes, + cl: Type[T], + codec_options: CodecOptions = DEFAULT_CODEC_OPTIONS, + ) -> T: + return self.structure(decode(data, codec_options=codec_options), cl) + + +def configure_converter(converter: BaseConverter): + """ + Configure the converter for use with the bson library. + + * sets are serialized as lists + * byte mapping keys are base85-encoded into strings when unstructuring, and reverse + * non-string, non-byte mapping keys are coerced into strings when unstructuring + * a deserialization hook is registered for bson.ObjectId by default + """ + + def gen_unstructure_mapping(cl: Any, unstructure_to=None): + key_handler = str + args = getattr(cl, "__args__", None) + if args: + if issubclass(args[0], str): + key_handler = None + elif issubclass(args[0], bytes): + + def key_handler(k): + return b85encode(k).decode("utf8") + + return converter.gen_unstructure_mapping( + cl, unstructure_to=unstructure_to, key_handler=key_handler + ) + + def gen_structure_mapping(cl: Any): + args = getattr(cl, "__args__", None) + if args and issubclass(args[0], bytes): + h = make_mapping_structure_fn(cl, converter, key_type=Base85Bytes) + else: + h = make_mapping_structure_fn(cl, converter) + return h + + converter.register_structure_hook(Base85Bytes, lambda v, _: b85decode(v)) + converter._unstructure_func.register_func_list( + [(is_mapping, gen_unstructure_mapping, True)] + ) + converter._structure_func.register_func_list( + [(is_mapping, gen_structure_mapping, True)] + ) + + 
converter.register_structure_hook(datetime, validate_datetime) + converter.register_structure_hook(ObjectId, lambda v, _: ObjectId(v)) + + +def make_converter(*args, **kwargs) -> BsonConverter: + kwargs["unstruct_collection_overrides"] = { + **kwargs.get("unstruct_collection_overrides", {}), + AbstractSet: list, + } + res = BsonConverter(*args, **kwargs) + configure_converter(res) + + return res diff --git a/pythonFiles/lib/jedilsp/cattrs/preconf/json.py b/pythonFiles/lib/jedilsp/cattrs/preconf/json.py new file mode 100644 index 0000000000000..2de7077404c94 --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattrs/preconf/json.py @@ -0,0 +1,48 @@ +"""Preconfigured converters for the stdlib json.""" +from base64 import b85decode, b85encode +from datetime import datetime +from json import dumps, loads +from typing import Any, Type, TypeVar, Union + +from cattrs._compat import AbstractSet, Counter + +from ..converters import BaseConverter, Converter + +T = TypeVar("T") + + +class JsonConverter(Converter): + def dumps(self, obj: Any, unstructure_as=None, **kwargs) -> str: + return dumps(self.unstructure(obj, unstructure_as=unstructure_as), **kwargs) + + def loads(self, data: Union[bytes, str], cl: Type[T], **kwargs) -> T: + return self.structure(loads(data, **kwargs), cl) + + +def configure_converter(converter: BaseConverter): + """ + Configure the converter for use with the stdlib json module. 
+    * bytes are serialized as base85 strings
+ + * datetimes are serialized as timestamp floats + * sets are serialized as lists + """ + converter.register_unstructure_hook(datetime, lambda v: v.timestamp()) + converter.register_structure_hook( + datetime, lambda v, _: datetime.fromtimestamp(v, timezone.utc) + ) + + +def make_converter(*args, **kwargs) -> MsgpackConverter: + kwargs["unstruct_collection_overrides"] = { + **kwargs.get("unstruct_collection_overrides", {}), + AbstractSet: list, + } + res = MsgpackConverter(*args, **kwargs) + configure_converter(res) + + return res diff --git a/pythonFiles/lib/jedilsp/cattrs/preconf/orjson.py b/pythonFiles/lib/jedilsp/cattrs/preconf/orjson.py new file mode 100644 index 0000000000000..fe0b143b6f48a --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattrs/preconf/orjson.py @@ -0,0 +1,77 @@ +"""Preconfigured converters for orjson.""" +from base64 import b85decode, b85encode +from datetime import datetime +from enum import Enum +from typing import Any, Type, TypeVar + +from orjson import dumps, loads + +from cattrs._compat import AbstractSet, is_mapping + +from ..converters import BaseConverter, Converter + +T = TypeVar("T") + + +class OrjsonConverter(Converter): + def dumps(self, obj: Any, unstructure_as=None, **kwargs) -> bytes: + return dumps(self.unstructure(obj, unstructure_as=unstructure_as), **kwargs) + + def loads(self, data: bytes, cl: Type[T]) -> T: + return self.structure(loads(data), cl) + + +def configure_converter(converter: BaseConverter): + """ + Configure the converter for use with the orjson library. 
+ + * bytes are serialized as base85 strings + * datetimes are serialized as ISO 8601 + * sets are serialized as lists + * string enum mapping keys have special handling + * mapping keys are coerced into strings when unstructuring + """ + converter.register_unstructure_hook( + bytes, lambda v: (b85encode(v) if v else b"").decode("utf8") + ) + converter.register_structure_hook(bytes, lambda v, _: b85decode(v)) + + converter.register_unstructure_hook(datetime, lambda v: v.isoformat()) + converter.register_structure_hook(datetime, lambda v, _: datetime.fromisoformat(v)) + + def gen_unstructure_mapping(cl: Any, unstructure_to=None): + key_handler = str + args = getattr(cl, "__args__", None) + if args: + if issubclass(args[0], str) and issubclass(args[0], Enum): + + def key_handler(v): + return v.value + + else: + # It's possible the handler for the key type has been overridden. + # (For example base85 encoding for bytes.) + # In that case, we want to use the override. + + kh = converter._unstructure_func.dispatch(args[0]) + if kh != converter._unstructure_identity: + key_handler = kh + + return converter.gen_unstructure_mapping( + cl, unstructure_to=unstructure_to, key_handler=key_handler + ) + + converter._unstructure_func.register_func_list( + [(is_mapping, gen_unstructure_mapping, True)] + ) + + +def make_converter(*args, **kwargs) -> OrjsonConverter: + kwargs["unstruct_collection_overrides"] = { + **kwargs.get("unstruct_collection_overrides", {}), + AbstractSet: list, + } + res = OrjsonConverter(*args, **kwargs) + configure_converter(res) + + return res diff --git a/pythonFiles/lib/jedilsp/cattrs/preconf/pyyaml.py b/pythonFiles/lib/jedilsp/cattrs/preconf/pyyaml.py new file mode 100644 index 0000000000000..a2fd7c82f0de7 --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattrs/preconf/pyyaml.py @@ -0,0 +1,44 @@ +"""Preconfigured converters for pyyaml.""" +from datetime import datetime +from typing import Any, Type, TypeVar + +from yaml import safe_dump, safe_load + +from 
cattrs._compat import FrozenSetSubscriptable + +from ..converters import BaseConverter, Converter +from . import validate_datetime + +T = TypeVar("T") + + +class PyyamlConverter(Converter): + def dumps(self, obj: Any, unstructure_as=None, **kwargs) -> str: + return safe_dump(self.unstructure(obj, unstructure_as=unstructure_as), **kwargs) + + def loads(self, data: str, cl: Type[T]) -> T: + return self.structure(safe_load(data), cl) + + +def configure_converter(converter: BaseConverter): + """ + Configure the converter for use with the pyyaml library. + + * frozensets are serialized as lists + * string enums are converted into strings explicitly + """ + converter.register_unstructure_hook( + str, lambda v: v if v.__class__ is str else v.value + ) + converter.register_structure_hook(datetime, validate_datetime) + + +def make_converter(*args, **kwargs) -> PyyamlConverter: + kwargs["unstruct_collection_overrides"] = { + **kwargs.get("unstruct_collection_overrides", {}), + FrozenSetSubscriptable: list, + } + res = PyyamlConverter(*args, **kwargs) + configure_converter(res) + + return res diff --git a/pythonFiles/lib/jedilsp/cattrs/preconf/tomlkit.py b/pythonFiles/lib/jedilsp/cattrs/preconf/tomlkit.py new file mode 100644 index 0000000000000..3c103a3e76034 --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattrs/preconf/tomlkit.py @@ -0,0 +1,77 @@ +"""Preconfigured converters for tomlkit.""" +from base64 import b85decode, b85encode +from datetime import datetime +from enum import Enum +from operator import attrgetter +from typing import Any, Type, TypeVar + +from tomlkit import dumps, loads + +from cattrs._compat import AbstractSet, is_mapping + +from ..converters import BaseConverter, Converter +from . 
import validate_datetime + +T = TypeVar("T") +_enum_value_getter = attrgetter("_value_") + + +class TomlkitConverter(Converter): + def dumps(self, obj: Any, unstructure_as=None, **kwargs) -> str: + return dumps(self.unstructure(obj, unstructure_as=unstructure_as), **kwargs) + + def loads(self, data: str, cl: Type[T]) -> T: + return self.structure(loads(data), cl) + + +def configure_converter(converter: BaseConverter): + """ + Configure the converter for use with the tomlkit library. + + * bytes are serialized as base85 strings + * sets are serialized as lists + * tuples are serializas as lists + * mapping keys are coerced into strings when unstructuring + """ + converter.register_structure_hook(bytes, lambda v, _: b85decode(v)) + converter.register_unstructure_hook( + bytes, lambda v: (b85encode(v) if v else b"").decode("utf8") + ) + + def gen_unstructure_mapping(cl: Any, unstructure_to=None): + key_handler = str + args = getattr(cl, "__args__", None) + if args: + # Currently, tomlkit has inconsistent behavior on 3.11 + # so we paper over it here. 
+ # https://github.com/sdispater/tomlkit/issues/237 + if issubclass(args[0], str): + if issubclass(args[0], Enum): + key_handler = _enum_value_getter + else: + key_handler = None + elif issubclass(args[0], bytes): + + def key_handler(k: bytes): + return b85encode(k).decode("utf8") + + return converter.gen_unstructure_mapping( + cl, unstructure_to=unstructure_to, key_handler=key_handler + ) + + converter._unstructure_func.register_func_list( + [(is_mapping, gen_unstructure_mapping, True)] + ) + converter.register_structure_hook(datetime, validate_datetime) + + +def make_converter(*args, **kwargs) -> TomlkitConverter: + kwargs["unstruct_collection_overrides"] = { + **kwargs.get("unstruct_collection_overrides", {}), + AbstractSet: list, + tuple: list, + } + res = TomlkitConverter(*args, **kwargs) + configure_converter(res) + + return res diff --git a/pythonFiles/lib/jedilsp/cattrs/preconf/ujson.py b/pythonFiles/lib/jedilsp/cattrs/preconf/ujson.py new file mode 100644 index 0000000000000..c340ad6748dcf --- /dev/null +++ b/pythonFiles/lib/jedilsp/cattrs/preconf/ujson.py @@ -0,0 +1,48 @@ +"""Preconfigured converters for ujson.""" +from base64 import b85decode, b85encode +from datetime import datetime +from typing import Any, AnyStr, Type, TypeVar + +from ujson import dumps, loads + +from cattrs._compat import AbstractSet + +from ..converters import BaseConverter, Converter + +T = TypeVar("T") + + +class UjsonConverter(Converter): + def dumps(self, obj: Any, unstructure_as=None, **kwargs) -> str: + return dumps(self.unstructure(obj, unstructure_as=unstructure_as), **kwargs) + + def loads(self, data: AnyStr, cl: Type[T], **kwargs) -> T: + return self.structure(loads(data, **kwargs), cl) + + +def configure_converter(converter: BaseConverter): + """ + Configure the converter for use with the ujson library. 
+    * bytes are serialized as base85 strings
+ + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +Licenses are intended to guarantee your freedom to share and change +free software--to make sure the software is free for all its users. + + This license, the Lesser General Public License, applies to some +specially designated software packages--typically libraries--of the +Free Software Foundation and other authors who decide to use it. You +can use it too, but we suggest you first think carefully about whether +this license or the ordinary General Public License is the better +strategy to use in any particular case, based on the explanations below. + + When we speak of free software, we are referring to freedom of use, +not price. Our General Public Licenses are designed to make sure that +you have the freedom to distribute copies of free software (and charge +for this service if you wish); that you receive source code or can get +it if you want it; that you can change the software and use pieces of +it in new free programs; and that you are informed that you can do +these things. + + To protect your rights, we need to make restrictions that forbid +distributors to deny you these rights or to ask you to surrender these +rights. These restrictions translate to certain responsibilities for +you if you distribute copies of the library or if you modify it. + + For example, if you distribute copies of the library, whether gratis +or for a fee, you must give the recipients all the rights that we gave +you. You must make sure that they, too, receive or can get the source +code. If you link other code with the library, you must provide +complete object files to the recipients, so that they can relink them +with the library after making changes to the library and recompiling +it. And you must show them these terms so they know their rights. 
+ + We protect your rights with a two-step method: (1) we copyright the +library, and (2) we offer you this license, which gives you legal +permission to copy, distribute and/or modify the library. + + To protect each distributor, we want to make it very clear that +there is no warranty for the free library. Also, if the library is +modified by someone else and passed on, the recipients should know +that what they have is not the original version, so that the original +author's reputation will not be affected by problems that might be +introduced by others. + + Finally, software patents pose a constant threat to the existence of +any free program. We wish to make sure that a company cannot +effectively restrict the users of a free program by obtaining a +restrictive license from a patent holder. Therefore, we insist that +any patent license obtained for a version of the library must be +consistent with the full freedom of use specified in this license. + + Most GNU software, including some libraries, is covered by the +ordinary GNU General Public License. This license, the GNU Lesser +General Public License, applies to certain designated libraries, and +is quite different from the ordinary General Public License. We use +this license for certain libraries in order to permit linking those +libraries into non-free programs. + + When a program is linked with a library, whether statically or using +a shared library, the combination of the two is legally speaking a +combined work, a derivative of the original library. The ordinary +General Public License therefore permits such linking only if the +entire combination fits its criteria of freedom. The Lesser General +Public License permits more lax criteria for linking other code with +the library. + + We call this license the "Lesser" General Public License because it +does Less to protect the user's freedom than the ordinary General +Public License. 
It also provides other free software developers Less +of an advantage over competing non-free programs. These disadvantages +are the reason we use the ordinary General Public License for many +libraries. However, the Lesser license provides advantages in certain +special circumstances. + + For example, on rare occasions, there may be a special need to +encourage the widest possible use of a certain library, so that it becomes +a de-facto standard. To achieve this, non-free programs must be +allowed to use the library. A more frequent case is that a free +library does the same job as widely used non-free libraries. In this +case, there is little to gain by limiting the free library to free +software only, so we use the Lesser General Public License. + + In other cases, permission to use a particular library in non-free +programs enables a greater number of people to use a large body of +free software. For example, permission to use the GNU C Library in +non-free programs enables many more people to use the whole GNU +operating system, as well as its variant, the GNU/Linux operating +system. + + Although the Lesser General Public License is Less protective of the +users' freedom, it does ensure that the user of a program that is +linked with the Library has the freedom and the wherewithal to run +that program using a modified version of the Library. + + The precise terms and conditions for copying, distribution and +modification follow. Pay close attention to the difference between a +"work based on the library" and a "work that uses the library". The +former contains code derived from the library, whereas the latter must +be combined with the library in order to run. + + GNU LESSER GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. 
This License Agreement applies to any software library or other +program which contains a notice placed by the copyright holder or +other authorized party saying it may be distributed under the terms of +this Lesser General Public License (also called "this License"). +Each licensee is addressed as "you". + + A "library" means a collection of software functions and/or data +prepared so as to be conveniently linked with application programs +(which use some of those functions and data) to form executables. + + The "Library", below, refers to any such software library or work +which has been distributed under these terms. A "work based on the +Library" means either the Library or any derivative work under +copyright law: that is to say, a work containing the Library or a +portion of it, either verbatim or with modifications and/or translated +straightforwardly into another language. (Hereinafter, translation is +included without limitation in the term "modification".) + + "Source code" for a work means the preferred form of the work for +making modifications to it. For a library, complete source code means +all the source code for all modules it contains, plus any associated +interface definition files, plus the scripts used to control compilation +and installation of the library. + + Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running a program using the Library is not restricted, and output from +such a program is covered only if its contents constitute a work based +on the Library (independent of the use of the Library in a tool for +writing it). Whether that is true depends on what the Library does +and what the program that uses the Library does. + + 1. 
You may copy and distribute verbatim copies of the Library's +complete source code as you receive it, in any medium, provided that +you conspicuously and appropriately publish on each copy an +appropriate copyright notice and disclaimer of warranty; keep intact +all the notices that refer to this License and to the absence of any +warranty; and distribute a copy of this License along with the +Library. + + You may charge a fee for the physical act of transferring a copy, +and you may at your option offer warranty protection in exchange for a +fee. + + 2. You may modify your copy or copies of the Library or any portion +of it, thus forming a work based on the Library, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices + stating that you changed the files and the date of any change. + + c) You must cause the whole of the work to be licensed at no + charge to all third parties under the terms of this License. + + d) If a facility in the modified Library refers to a function or a + table of data to be supplied by an application program that uses + the facility, other than as an argument passed when the facility + is invoked, then you must make a good faith effort to ensure that, + in the event an application does not supply such function or + table, the facility still operates, and performs whatever part of + its purpose remains meaningful. + + (For example, a function in a library to compute square roots has + a purpose that is entirely well-defined independent of the + application. Therefore, Subsection 2d requires that any + application-supplied function or table used by this function must + be optional: if the application does not supply it, the square + root function must still compute square roots.) 
+ +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Library, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Library, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote +it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Library. + +In addition, mere aggregation of another work not based on the Library +with the Library (or with a work based on the Library) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may opt to apply the terms of the ordinary GNU General Public +License instead of this License to a given copy of the Library. To do +this, you must alter all the notices that refer to this License, so +that they refer to the ordinary GNU General Public License, version 2, +instead of to this License. (If a newer version than version 2 of the +ordinary GNU General Public License has appeared, then you can specify +that version instead if you wish.) Do not make any other change in +these notices. + + Once this change is made in a given copy, it is irreversible for +that copy, so the ordinary GNU General Public License applies to all +subsequent copies and derivative works made from that copy. + + This option is useful when you wish to copy part of the code of +the Library into a program that is not a library. + + 4. 
You may copy and distribute the Library (or a portion or +derivative of it, under Section 2) in object code or executable form +under the terms of Sections 1 and 2 above provided that you accompany +it with the complete corresponding machine-readable source code, which +must be distributed under the terms of Sections 1 and 2 above on a +medium customarily used for software interchange. + + If distribution of object code is made by offering access to copy +from a designated place, then offering equivalent access to copy the +source code from the same place satisfies the requirement to +distribute the source code, even though third parties are not +compelled to copy the source along with the object code. + + 5. A program that contains no derivative of any portion of the +Library, but is designed to work with the Library by being compiled or +linked with it, is called a "work that uses the Library". Such a +work, in isolation, is not a derivative work of the Library, and +therefore falls outside the scope of this License. + + However, linking a "work that uses the Library" with the Library +creates an executable that is a derivative of the Library (because it +contains portions of the Library), rather than a "work that uses the +library". The executable is therefore covered by this License. +Section 6 states terms for distribution of such executables. + + When a "work that uses the Library" uses material from a header file +that is part of the Library, the object code for the work may be a +derivative work of the Library even though the source code is not. +Whether this is true is especially significant if the work can be +linked without the Library, or if the work is itself a library. The +threshold for this to be true is not precisely defined by law. 
+ + If such an object file uses only numerical parameters, data +structure layouts and accessors, and small macros and small inline +functions (ten lines or less in length), then the use of the object +file is unrestricted, regardless of whether it is legally a derivative +work. (Executables containing this object code plus portions of the +Library will still fall under Section 6.) + + Otherwise, if the work is a derivative of the Library, you may +distribute the object code for the work under the terms of Section 6. +Any executables containing that work also fall under Section 6, +whether or not they are linked directly with the Library itself. + + 6. As an exception to the Sections above, you may also combine or +link a "work that uses the Library" with the Library to produce a +work containing portions of the Library, and distribute that work +under terms of your choice, provided that the terms permit +modification of the work for the customer's own use and reverse +engineering for debugging such modifications. + + You must give prominent notice with each copy of the work that the +Library is used in it and that the Library and its use are covered by +this License. You must supply a copy of this License. If the work +during execution displays copyright notices, you must include the +copyright notice for the Library among them, as well as a reference +directing the user to the copy of this License. Also, you must do one +of these things: + + a) Accompany the work with the complete corresponding + machine-readable source code for the Library including whatever + changes were used in the work (which must be distributed under + Sections 1 and 2 above); and, if the work is an executable linked + with the Library, with the complete machine-readable "work that + uses the Library", as object code and/or source code, so that the + user can modify the Library and then relink to produce a modified + executable containing the modified Library. 
(It is understood + that the user who changes the contents of definitions files in the + Library will not necessarily be able to recompile the application + to use the modified definitions.) + + b) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (1) uses at run time a + copy of the library already present on the user's computer system, + rather than copying library functions into the executable, and (2) + will operate properly with a modified version of the library, if + the user installs one, as long as the modified version is + interface-compatible with the version that the work was made with. + + c) Accompany the work with a written offer, valid for at + least three years, to give the same user the materials + specified in Subsection 6a, above, for a charge no more + than the cost of performing this distribution. + + d) If distribution of the work is made by offering access to copy + from a designated place, offer equivalent access to copy the above + specified materials from the same place. + + e) Verify that the user has already received a copy of these + materials or that you have already sent this user a copy. + + For an executable, the required form of the "work that uses the +Library" must include any data and utility programs needed for +reproducing the executable from it. However, as a special exception, +the materials to be distributed need not include anything that is +normally distributed (in either source or binary form) with the major +components (compiler, kernel, and so on) of the operating system on +which the executable runs, unless that component itself accompanies +the executable. + + It may happen that this requirement contradicts the license +restrictions of other proprietary libraries that do not normally +accompany the operating system. Such a contradiction means you cannot +use both them and the Library together in an executable that you +distribute. + + 7. 
You may place library facilities that are a work based on the +Library side-by-side in a single library together with other library +facilities not covered by this License, and distribute such a combined +library, provided that the separate distribution of the work based on +the Library and of the other library facilities is otherwise +permitted, and provided that you do these two things: + + a) Accompany the combined library with a copy of the same work + based on the Library, uncombined with any other library + facilities. This must be distributed under the terms of the + Sections above. + + b) Give prominent notice with the combined library of the fact + that part of it is a work based on the Library, and explaining + where to find the accompanying uncombined form of the same work. + + 8. You may not copy, modify, sublicense, link with, or distribute +the Library except as expressly provided under this License. Any +attempt otherwise to copy, modify, sublicense, link with, or +distribute the Library is void, and will automatically terminate your +rights under this License. However, parties who have received copies, +or rights, from you under this License will not have their licenses +terminated so long as such parties remain in full compliance. + + 9. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Library or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Library (or any work based on the +Library), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Library or works based on it. + + 10. 
Each time you redistribute the Library (or any work based on the +Library), the recipient automatically receives a license from the +original licensor to copy, distribute, link with or modify the Library +subject to these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties with +this License. + + 11. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Library at all. For example, if a patent +license would not permit royalty-free redistribution of the Library by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Library. + +If any portion of this section is held invalid or unenforceable under any +particular circumstance, the balance of the section is intended to apply, +and the section as a whole is intended to apply in other circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system which is +implemented by public license practices. 
Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 12. If the distribution and/or use of the Library is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Library under this License may add +an explicit geographical distribution limitation excluding those countries, +so that distribution is permitted only in or among countries not thus +excluded. In such case, this License incorporates the limitation as if +written in the body of this License. + + 13. The Free Software Foundation may publish revised and/or new +versions of the Lesser General Public License from time to time. +Such new versions will be similar in spirit to the present version, +but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Library +specifies a version number of this License which applies to it and +"any later version", you have the option of following the terms and +conditions either of that version or of any later version published by +the Free Software Foundation. If the Library does not specify a +license version number, you may choose any version ever published by +the Free Software Foundation. + + 14. If you wish to incorporate parts of the Library into other free +programs whose distribution conditions are incompatible with these, +write to the author to ask for permission. For software which is +copyrighted by the Free Software Foundation, write to the Free +Software Foundation; we sometimes make exceptions for this. 
Our +decision will be guided by the two goals of preserving the free status +of all derivatives of our free software and of promoting the sharing +and reuse of software generally. + + NO WARRANTY + + 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO +WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. +EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR +OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY +KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE +LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME +THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN +WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY +AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU +FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR +CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE +LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING +RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A +FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF +SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH +DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Libraries + + If you develop a new library, and you want it to be of the greatest +possible use to the public, we recommend making it free software that +everyone can redistribute and change. You can do so by permitting +redistribution under these terms (or, alternatively, under the terms of the +ordinary General Public License). + + To apply these terms, attach the following notices to the library. 
It is +safest to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least the +"copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 + USA + +Also add information on how to contact you by electronic and paper mail. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the library, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the + library `Frob' (a library for tweaking knobs) written by James Random + Hacker. + + , 1 April 1990 + Ty Coon, President of Vice + +That's all there is to it! 
diff --git a/pythonFiles/lib/jedilsp/docstring_to_markdown-0.10.dist-info/METADATA b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.10.dist-info/METADATA new file mode 100644 index 0000000000000..dd915c5315398 --- /dev/null +++ b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.10.dist-info/METADATA @@ -0,0 +1,64 @@ +Metadata-Version: 2.1 +Name: docstring-to-markdown +Version: 0.10 +Summary: On the fly conversion of Python docstrings to markdown +Home-page: UNKNOWN +Author: Michał Krassowski +Author-email: krassowski.michal+pypi@gmail.com +License: LGPL-2.1 +Project-URL: Bug Tracker, https://github.com/krassowski/docstring-to-markdown/issues +Project-URL: Source Code, https://github.com/krassowski/docstring-to-markdown +Keywords: Docstring,conversion,markdown +Platform: UNKNOWN +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+) +Classifier: Programming Language :: Python +Requires-Python: >=3.6 +Description-Content-Type: text/markdown + +# docstring-to-markdown + +[![tests](https://github.com/krassowski/docstring-to-markdown/workflows/tests/badge.svg)](https://github.com/krassowski/docstring-to-markdown/actions?query=workflow%3A%22tests%22) +![CodeQL](https://github.com/krassowski/docstring-to-markdown/workflows/CodeQL/badge.svg) +[![pypi-version](https://img.shields.io/pypi/v/docstring-to-markdown.svg)](https://python.org/pypi/docstring-to-markdown) + +On the fly conversion of Python docstrings to markdown + +- Python 3.6+ +- currently can recognise reStructuredText and convert multiple of its features to Markdown +- in the future will be able to convert Google docstrings too + +### Installation + +```bash +pip install docstring-to-markdown +``` + + +### Example + +Convert reStructuredText: + +```python +>>> import docstring_to_markdown +>>> docstring_to_markdown.convert(':math:`\\sum`') +'$\\sum$' +``` + +When given the format cannot be recognised an exception will be 
raised: + +```python +>>> docstring_to_markdown.convert('\\sum') +Traceback (most recent call last): + raise UnknownFormatError() +docstring_to_markdown.UnknownFormatError +``` + +### Development + +```bash +pip install -e . +pytest +``` + + diff --git a/pythonFiles/lib/jedilsp/docstring_to_markdown-0.10.dist-info/RECORD b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.10.dist-info/RECORD new file mode 100644 index 0000000000000..e89d4fb0edf1e --- /dev/null +++ b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.10.dist-info/RECORD @@ -0,0 +1,12 @@ +docstring_to_markdown-0.10.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +docstring_to_markdown-0.10.dist-info/LICENSE,sha256=IMF9i4xIpgCADf0U-V1cuf9HBmqWQd3qtI3FSuyW4zE,26526 +docstring_to_markdown-0.10.dist-info/METADATA,sha256=sbCwIv4gsNsP1rl_JLqTmxqJVkk75ofkLXuO30UZ05c,1857 +docstring_to_markdown-0.10.dist-info/RECORD,, +docstring_to_markdown-0.10.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +docstring_to_markdown-0.10.dist-info/WHEEL,sha256=ewwEueio1C2XeHTvT17n8dZUJgOvyCWCt0WVNLClP9o,92 +docstring_to_markdown-0.10.dist-info/top_level.txt,sha256=_af5482lGo-DbcT_gtcbEzVnIH6VqsMvN7O2CgBvYNA,22 +docstring_to_markdown/__init__.py,sha256=rZiwXLSp35EDP0jKQN5QNFeC9Mz9-19teO5tbMbbPyk,264 +docstring_to_markdown/__pycache__/__init__.cpython-39.pyc,, +docstring_to_markdown/__pycache__/rst.cpython-39.pyc,, +docstring_to_markdown/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +docstring_to_markdown/rst.py,sha256=QNCjZ44qr34pfDZb-jR_59odQfAnX9EU6lFtAZyhDYE,23548 diff --git a/pythonFiles/lib/jedilsp/docstring_to_markdown-0.10.dist-info/REQUESTED b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.10.dist-info/REQUESTED new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/pythonFiles/lib/jedilsp/docstring_to_markdown-0.10.dist-info/WHEEL b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.10.dist-info/WHEEL new file mode 100644 index 
0000000000000..5bad85fdc1cd0 --- /dev/null +++ b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.10.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.37.0) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/pythonFiles/lib/jedilsp/docstring_to_markdown-0.10.dist-info/top_level.txt b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.10.dist-info/top_level.txt new file mode 100644 index 0000000000000..f41a06f5ad456 --- /dev/null +++ b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.10.dist-info/top_level.txt @@ -0,0 +1 @@ +docstring_to_markdown diff --git a/pythonFiles/lib/jedilsp/docstring_to_markdown-0.11.dist-info/INSTALLER b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.11.dist-info/INSTALLER new file mode 100644 index 0000000000000..a1b589e38a320 --- /dev/null +++ b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.11.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/pythonFiles/lib/jedilsp/docstring_to_markdown-0.11.dist-info/LICENSE b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.11.dist-info/LICENSE new file mode 100644 index 0000000000000..8000a6faacf47 --- /dev/null +++ b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.11.dist-info/LICENSE @@ -0,0 +1,504 @@ + GNU LESSER GENERAL PUBLIC LICENSE + Version 2.1, February 1999 + + Copyright (C) 1991, 1999 Free Software Foundation, Inc. + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + +[This is the first released version of the Lesser GPL. It also counts + as the successor of the GNU Library Public License, version 2, hence + the version number 2.1.] + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +Licenses are intended to guarantee your freedom to share and change +free software--to make sure the software is free for all its users. 
+ + This license, the Lesser General Public License, applies to some +specially designated software packages--typically libraries--of the +Free Software Foundation and other authors who decide to use it. You +can use it too, but we suggest you first think carefully about whether +this license or the ordinary General Public License is the better +strategy to use in any particular case, based on the explanations below. + + When we speak of free software, we are referring to freedom of use, +not price. Our General Public Licenses are designed to make sure that +you have the freedom to distribute copies of free software (and charge +for this service if you wish); that you receive source code or can get +it if you want it; that you can change the software and use pieces of +it in new free programs; and that you are informed that you can do +these things. + + To protect your rights, we need to make restrictions that forbid +distributors to deny you these rights or to ask you to surrender these +rights. These restrictions translate to certain responsibilities for +you if you distribute copies of the library or if you modify it. + + For example, if you distribute copies of the library, whether gratis +or for a fee, you must give the recipients all the rights that we gave +you. You must make sure that they, too, receive or can get the source +code. If you link other code with the library, you must provide +complete object files to the recipients, so that they can relink them +with the library after making changes to the library and recompiling +it. And you must show them these terms so they know their rights. + + We protect your rights with a two-step method: (1) we copyright the +library, and (2) we offer you this license, which gives you legal +permission to copy, distribute and/or modify the library. + + To protect each distributor, we want to make it very clear that +there is no warranty for the free library. 
Also, if the library is +modified by someone else and passed on, the recipients should know +that what they have is not the original version, so that the original +author's reputation will not be affected by problems that might be +introduced by others. + + Finally, software patents pose a constant threat to the existence of +any free program. We wish to make sure that a company cannot +effectively restrict the users of a free program by obtaining a +restrictive license from a patent holder. Therefore, we insist that +any patent license obtained for a version of the library must be +consistent with the full freedom of use specified in this license. + + Most GNU software, including some libraries, is covered by the +ordinary GNU General Public License. This license, the GNU Lesser +General Public License, applies to certain designated libraries, and +is quite different from the ordinary General Public License. We use +this license for certain libraries in order to permit linking those +libraries into non-free programs. + + When a program is linked with a library, whether statically or using +a shared library, the combination of the two is legally speaking a +combined work, a derivative of the original library. The ordinary +General Public License therefore permits such linking only if the +entire combination fits its criteria of freedom. The Lesser General +Public License permits more lax criteria for linking other code with +the library. + + We call this license the "Lesser" General Public License because it +does Less to protect the user's freedom than the ordinary General +Public License. It also provides other free software developers Less +of an advantage over competing non-free programs. These disadvantages +are the reason we use the ordinary General Public License for many +libraries. However, the Lesser license provides advantages in certain +special circumstances. 
+ + For example, on rare occasions, there may be a special need to +encourage the widest possible use of a certain library, so that it becomes +a de-facto standard. To achieve this, non-free programs must be +allowed to use the library. A more frequent case is that a free +library does the same job as widely used non-free libraries. In this +case, there is little to gain by limiting the free library to free +software only, so we use the Lesser General Public License. + + In other cases, permission to use a particular library in non-free +programs enables a greater number of people to use a large body of +free software. For example, permission to use the GNU C Library in +non-free programs enables many more people to use the whole GNU +operating system, as well as its variant, the GNU/Linux operating +system. + + Although the Lesser General Public License is Less protective of the +users' freedom, it does ensure that the user of a program that is +linked with the Library has the freedom and the wherewithal to run +that program using a modified version of the Library. + + The precise terms and conditions for copying, distribution and +modification follow. Pay close attention to the difference between a +"work based on the library" and a "work that uses the library". The +former contains code derived from the library, whereas the latter must +be combined with the library in order to run. + + GNU LESSER GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License Agreement applies to any software library or other +program which contains a notice placed by the copyright holder or +other authorized party saying it may be distributed under the terms of +this Lesser General Public License (also called "this License"). +Each licensee is addressed as "you". 
+ + A "library" means a collection of software functions and/or data +prepared so as to be conveniently linked with application programs +(which use some of those functions and data) to form executables. + + The "Library", below, refers to any such software library or work +which has been distributed under these terms. A "work based on the +Library" means either the Library or any derivative work under +copyright law: that is to say, a work containing the Library or a +portion of it, either verbatim or with modifications and/or translated +straightforwardly into another language. (Hereinafter, translation is +included without limitation in the term "modification".) + + "Source code" for a work means the preferred form of the work for +making modifications to it. For a library, complete source code means +all the source code for all modules it contains, plus any associated +interface definition files, plus the scripts used to control compilation +and installation of the library. + + Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running a program using the Library is not restricted, and output from +such a program is covered only if its contents constitute a work based +on the Library (independent of the use of the Library in a tool for +writing it). Whether that is true depends on what the Library does +and what the program that uses the Library does. + + 1. You may copy and distribute verbatim copies of the Library's +complete source code as you receive it, in any medium, provided that +you conspicuously and appropriately publish on each copy an +appropriate copyright notice and disclaimer of warranty; keep intact +all the notices that refer to this License and to the absence of any +warranty; and distribute a copy of this License along with the +Library. 
+ + You may charge a fee for the physical act of transferring a copy, +and you may at your option offer warranty protection in exchange for a +fee. + + 2. You may modify your copy or copies of the Library or any portion +of it, thus forming a work based on the Library, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices + stating that you changed the files and the date of any change. + + c) You must cause the whole of the work to be licensed at no + charge to all third parties under the terms of this License. + + d) If a facility in the modified Library refers to a function or a + table of data to be supplied by an application program that uses + the facility, other than as an argument passed when the facility + is invoked, then you must make a good faith effort to ensure that, + in the event an application does not supply such function or + table, the facility still operates, and performs whatever part of + its purpose remains meaningful. + + (For example, a function in a library to compute square roots has + a purpose that is entirely well-defined independent of the + application. Therefore, Subsection 2d requires that any + application-supplied function or table used by this function must + be optional: if the application does not supply it, the square + root function must still compute square roots.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Library, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. 
But when you +distribute the same sections as part of a whole which is a work based +on the Library, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote +it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Library. + +In addition, mere aggregation of another work not based on the Library +with the Library (or with a work based on the Library) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may opt to apply the terms of the ordinary GNU General Public +License instead of this License to a given copy of the Library. To do +this, you must alter all the notices that refer to this License, so +that they refer to the ordinary GNU General Public License, version 2, +instead of to this License. (If a newer version than version 2 of the +ordinary GNU General Public License has appeared, then you can specify +that version instead if you wish.) Do not make any other change in +these notices. + + Once this change is made in a given copy, it is irreversible for +that copy, so the ordinary GNU General Public License applies to all +subsequent copies and derivative works made from that copy. + + This option is useful when you wish to copy part of the code of +the Library into a program that is not a library. + + 4. 
You may copy and distribute the Library (or a portion or +derivative of it, under Section 2) in object code or executable form +under the terms of Sections 1 and 2 above provided that you accompany +it with the complete corresponding machine-readable source code, which +must be distributed under the terms of Sections 1 and 2 above on a +medium customarily used for software interchange. + + If distribution of object code is made by offering access to copy +from a designated place, then offering equivalent access to copy the +source code from the same place satisfies the requirement to +distribute the source code, even though third parties are not +compelled to copy the source along with the object code. + + 5. A program that contains no derivative of any portion of the +Library, but is designed to work with the Library by being compiled or +linked with it, is called a "work that uses the Library". Such a +work, in isolation, is not a derivative work of the Library, and +therefore falls outside the scope of this License. + + However, linking a "work that uses the Library" with the Library +creates an executable that is a derivative of the Library (because it +contains portions of the Library), rather than a "work that uses the +library". The executable is therefore covered by this License. +Section 6 states terms for distribution of such executables. + + When a "work that uses the Library" uses material from a header file +that is part of the Library, the object code for the work may be a +derivative work of the Library even though the source code is not. +Whether this is true is especially significant if the work can be +linked without the Library, or if the work is itself a library. The +threshold for this to be true is not precisely defined by law. 
+ + If such an object file uses only numerical parameters, data +structure layouts and accessors, and small macros and small inline +functions (ten lines or less in length), then the use of the object +file is unrestricted, regardless of whether it is legally a derivative +work. (Executables containing this object code plus portions of the +Library will still fall under Section 6.) + + Otherwise, if the work is a derivative of the Library, you may +distribute the object code for the work under the terms of Section 6. +Any executables containing that work also fall under Section 6, +whether or not they are linked directly with the Library itself. + + 6. As an exception to the Sections above, you may also combine or +link a "work that uses the Library" with the Library to produce a +work containing portions of the Library, and distribute that work +under terms of your choice, provided that the terms permit +modification of the work for the customer's own use and reverse +engineering for debugging such modifications. + + You must give prominent notice with each copy of the work that the +Library is used in it and that the Library and its use are covered by +this License. You must supply a copy of this License. If the work +during execution displays copyright notices, you must include the +copyright notice for the Library among them, as well as a reference +directing the user to the copy of this License. Also, you must do one +of these things: + + a) Accompany the work with the complete corresponding + machine-readable source code for the Library including whatever + changes were used in the work (which must be distributed under + Sections 1 and 2 above); and, if the work is an executable linked + with the Library, with the complete machine-readable "work that + uses the Library", as object code and/or source code, so that the + user can modify the Library and then relink to produce a modified + executable containing the modified Library. 
(It is understood + that the user who changes the contents of definitions files in the + Library will not necessarily be able to recompile the application + to use the modified definitions.) + + b) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (1) uses at run time a + copy of the library already present on the user's computer system, + rather than copying library functions into the executable, and (2) + will operate properly with a modified version of the library, if + the user installs one, as long as the modified version is + interface-compatible with the version that the work was made with. + + c) Accompany the work with a written offer, valid for at + least three years, to give the same user the materials + specified in Subsection 6a, above, for a charge no more + than the cost of performing this distribution. + + d) If distribution of the work is made by offering access to copy + from a designated place, offer equivalent access to copy the above + specified materials from the same place. + + e) Verify that the user has already received a copy of these + materials or that you have already sent this user a copy. + + For an executable, the required form of the "work that uses the +Library" must include any data and utility programs needed for +reproducing the executable from it. However, as a special exception, +the materials to be distributed need not include anything that is +normally distributed (in either source or binary form) with the major +components (compiler, kernel, and so on) of the operating system on +which the executable runs, unless that component itself accompanies +the executable. + + It may happen that this requirement contradicts the license +restrictions of other proprietary libraries that do not normally +accompany the operating system. Such a contradiction means you cannot +use both them and the Library together in an executable that you +distribute. + + 7. 
You may place library facilities that are a work based on the +Library side-by-side in a single library together with other library +facilities not covered by this License, and distribute such a combined +library, provided that the separate distribution of the work based on +the Library and of the other library facilities is otherwise +permitted, and provided that you do these two things: + + a) Accompany the combined library with a copy of the same work + based on the Library, uncombined with any other library + facilities. This must be distributed under the terms of the + Sections above. + + b) Give prominent notice with the combined library of the fact + that part of it is a work based on the Library, and explaining + where to find the accompanying uncombined form of the same work. + + 8. You may not copy, modify, sublicense, link with, or distribute +the Library except as expressly provided under this License. Any +attempt otherwise to copy, modify, sublicense, link with, or +distribute the Library is void, and will automatically terminate your +rights under this License. However, parties who have received copies, +or rights, from you under this License will not have their licenses +terminated so long as such parties remain in full compliance. + + 9. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Library or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Library (or any work based on the +Library), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Library or works based on it. + + 10. 
Each time you redistribute the Library (or any work based on the +Library), the recipient automatically receives a license from the +original licensor to copy, distribute, link with or modify the Library +subject to these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties with +this License. + + 11. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Library at all. For example, if a patent +license would not permit royalty-free redistribution of the Library by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Library. + +If any portion of this section is held invalid or unenforceable under any +particular circumstance, the balance of the section is intended to apply, +and the section as a whole is intended to apply in other circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system which is +implemented by public license practices. 
Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 12. If the distribution and/or use of the Library is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Library under this License may add +an explicit geographical distribution limitation excluding those countries, +so that distribution is permitted only in or among countries not thus +excluded. In such case, this License incorporates the limitation as if +written in the body of this License. + + 13. The Free Software Foundation may publish revised and/or new +versions of the Lesser General Public License from time to time. +Such new versions will be similar in spirit to the present version, +but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Library +specifies a version number of this License which applies to it and +"any later version", you have the option of following the terms and +conditions either of that version or of any later version published by +the Free Software Foundation. If the Library does not specify a +license version number, you may choose any version ever published by +the Free Software Foundation. + + 14. If you wish to incorporate parts of the Library into other free +programs whose distribution conditions are incompatible with these, +write to the author to ask for permission. For software which is +copyrighted by the Free Software Foundation, write to the Free +Software Foundation; we sometimes make exceptions for this. 
Our +decision will be guided by the two goals of preserving the free status +of all derivatives of our free software and of promoting the sharing +and reuse of software generally. + + NO WARRANTY + + 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO +WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. +EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR +OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY +KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE +LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME +THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN +WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY +AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU +FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR +CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE +LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING +RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A +FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF +SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH +DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Libraries + + If you develop a new library, and you want it to be of the greatest +possible use to the public, we recommend making it free software that +everyone can redistribute and change. You can do so by permitting +redistribution under these terms (or, alternatively, under the terms of the +ordinary General Public License). + + To apply these terms, attach the following notices to the library. 
It is +safest to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least the +"copyright" line and a pointer to where the full notice is found. + + <one line to give the library's name and a brief idea of what it does.> + Copyright (C) <year> <name of author> + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 + USA + +Also add information on how to contact you by electronic and paper mail. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the library, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the + library `Frob' (a library for tweaking knobs) written by James Random + Hacker. + + <signature of Ty Coon>, 1 April 1990 + Ty Coon, President of Vice + +That's all there is to it! 
diff --git a/pythonFiles/lib/jedilsp/docstring_to_markdown-0.11.dist-info/METADATA b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.11.dist-info/METADATA new file mode 100644 index 0000000000000..6445b9620749f --- /dev/null +++ b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.11.dist-info/METADATA @@ -0,0 +1,66 @@ +Metadata-Version: 2.1 +Name: docstring-to-markdown +Version: 0.11 +Summary: On the fly conversion of Python docstrings to markdown +Home-page: UNKNOWN +Author: Michał Krassowski +Author-email: krassowski.michal+pypi@gmail.com +License: LGPL-2.1-or-later +Project-URL: Bug Tracker, https://github.com/python-lsp/docstring-to-markdown/issues +Project-URL: Source Code, https://github.com/python-lsp/docstring-to-markdown +Keywords: Docstring,conversion,markdown +Platform: UNKNOWN +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+) +Classifier: Programming Language :: Python +Classifier: Topic :: Text Processing :: Markup +Classifier: Topic :: Documentation :: Sphinx +Requires-Python: >=3.6 +Description-Content-Type: text/markdown + +# docstring-to-markdown + +[![tests](https://github.com/python-lsp/docstring-to-markdown/workflows/tests/badge.svg)](https://github.com/python-lsp/docstring-to-markdown/actions?query=workflow%3A%22tests%22) +![CodeQL](https://github.com/python-lsp/docstring-to-markdown/workflows/CodeQL/badge.svg) +[![pypi-version](https://img.shields.io/pypi/v/docstring-to-markdown.svg)](https://python.org/pypi/docstring-to-markdown) + +On the fly conversion of Python docstrings to markdown + +- Python 3.6+ +- currently can recognise reStructuredText and convert multiple of its features to Markdown +- in the future will be able to convert Google docstrings too + +### Installation + +```bash +pip install docstring-to-markdown +``` + + +### Example + +Convert reStructuredText: + +```python +>>> import docstring_to_markdown +>>> 
docstring_to_markdown.convert(':math:`\\sum`') +'$\\sum$' +``` + +When given the format cannot be recognised an exception will be raised: + +```python +>>> docstring_to_markdown.convert('\\sum') +Traceback (most recent call last): + raise UnknownFormatError() +docstring_to_markdown.UnknownFormatError +``` + +### Development + +```bash +pip install -e . +pytest +``` + + diff --git a/pythonFiles/lib/jedilsp/docstring_to_markdown-0.11.dist-info/RECORD b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.11.dist-info/RECORD new file mode 100644 index 0000000000000..b4e7e4089d522 --- /dev/null +++ b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.11.dist-info/RECORD @@ -0,0 +1,12 @@ +docstring_to_markdown-0.11.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +docstring_to_markdown-0.11.dist-info/LICENSE,sha256=IMF9i4xIpgCADf0U-V1cuf9HBmqWQd3qtI3FSuyW4zE,26526 +docstring_to_markdown-0.11.dist-info/METADATA,sha256=udcc49aaySu4fjsb2RZCYkkUXaP7uznhI4sGb-hGW8U,1958 +docstring_to_markdown-0.11.dist-info/RECORD,, +docstring_to_markdown-0.11.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +docstring_to_markdown-0.11.dist-info/WHEEL,sha256=2wepM1nk4DS4eFpYrW1TTqPcoGNfHhhO_i5m4cOimbo,92 +docstring_to_markdown-0.11.dist-info/top_level.txt,sha256=_af5482lGo-DbcT_gtcbEzVnIH6VqsMvN7O2CgBvYNA,22 +docstring_to_markdown/__init__.py,sha256=iH1YOwDKLPGRAQad3VjmdVWTruL2lJkPsrFFQdpiCMU,264 +docstring_to_markdown/__pycache__/__init__.cpython-39.pyc,, +docstring_to_markdown/__pycache__/rst.cpython-39.pyc,, +docstring_to_markdown/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +docstring_to_markdown/rst.py,sha256=2wOn1YLKmO6n--uWKAniNmBwnonMiM1A28Crn4sImKg,25134 diff --git a/pythonFiles/lib/jedilsp/docstring_to_markdown-0.11.dist-info/REQUESTED b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.11.dist-info/REQUESTED new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git 
a/pythonFiles/lib/jedilsp/docstring_to_markdown-0.11.dist-info/WHEEL b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.11.dist-info/WHEEL new file mode 100644 index 0000000000000..57e3d840d59a6 --- /dev/null +++ b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.11.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.38.4) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/pythonFiles/lib/jedilsp/docstring_to_markdown-0.11.dist-info/top_level.txt b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.11.dist-info/top_level.txt new file mode 100644 index 0000000000000..f41a06f5ad456 --- /dev/null +++ b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.11.dist-info/top_level.txt @@ -0,0 +1 @@ +docstring_to_markdown diff --git a/pythonFiles/lib/jedilsp/docstring_to_markdown-0.9.dist-info/INSTALLER b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.9.dist-info/INSTALLER new file mode 100644 index 0000000000000..a1b589e38a320 --- /dev/null +++ b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.9.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/pythonFiles/lib/jedilsp/docstring_to_markdown-0.9.dist-info/LICENSE b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.9.dist-info/LICENSE new file mode 100644 index 0000000000000..8000a6faacf47 --- /dev/null +++ b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.9.dist-info/LICENSE @@ -0,0 +1,504 @@ + GNU LESSER GENERAL PUBLIC LICENSE + Version 2.1, February 1999 + + Copyright (C) 1991, 1999 Free Software Foundation, Inc. + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + +[This is the first released version of the Lesser GPL. It also counts + as the successor of the GNU Library Public License, version 2, hence + the version number 2.1.] + + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. 
By contrast, the GNU General Public +Licenses are intended to guarantee your freedom to share and change +free software--to make sure the software is free for all its users. + + This license, the Lesser General Public License, applies to some +specially designated software packages--typically libraries--of the +Free Software Foundation and other authors who decide to use it. You +can use it too, but we suggest you first think carefully about whether +this license or the ordinary General Public License is the better +strategy to use in any particular case, based on the explanations below. + + When we speak of free software, we are referring to freedom of use, +not price. Our General Public Licenses are designed to make sure that +you have the freedom to distribute copies of free software (and charge +for this service if you wish); that you receive source code or can get +it if you want it; that you can change the software and use pieces of +it in new free programs; and that you are informed that you can do +these things. + + To protect your rights, we need to make restrictions that forbid +distributors to deny you these rights or to ask you to surrender these +rights. These restrictions translate to certain responsibilities for +you if you distribute copies of the library or if you modify it. + + For example, if you distribute copies of the library, whether gratis +or for a fee, you must give the recipients all the rights that we gave +you. You must make sure that they, too, receive or can get the source +code. If you link other code with the library, you must provide +complete object files to the recipients, so that they can relink them +with the library after making changes to the library and recompiling +it. And you must show them these terms so they know their rights. + + We protect your rights with a two-step method: (1) we copyright the +library, and (2) we offer you this license, which gives you legal +permission to copy, distribute and/or modify the library. 
+ + To protect each distributor, we want to make it very clear that +there is no warranty for the free library. Also, if the library is +modified by someone else and passed on, the recipients should know +that what they have is not the original version, so that the original +author's reputation will not be affected by problems that might be +introduced by others. + + Finally, software patents pose a constant threat to the existence of +any free program. We wish to make sure that a company cannot +effectively restrict the users of a free program by obtaining a +restrictive license from a patent holder. Therefore, we insist that +any patent license obtained for a version of the library must be +consistent with the full freedom of use specified in this license. + + Most GNU software, including some libraries, is covered by the +ordinary GNU General Public License. This license, the GNU Lesser +General Public License, applies to certain designated libraries, and +is quite different from the ordinary General Public License. We use +this license for certain libraries in order to permit linking those +libraries into non-free programs. + + When a program is linked with a library, whether statically or using +a shared library, the combination of the two is legally speaking a +combined work, a derivative of the original library. The ordinary +General Public License therefore permits such linking only if the +entire combination fits its criteria of freedom. The Lesser General +Public License permits more lax criteria for linking other code with +the library. + + We call this license the "Lesser" General Public License because it +does Less to protect the user's freedom than the ordinary General +Public License. It also provides other free software developers Less +of an advantage over competing non-free programs. These disadvantages +are the reason we use the ordinary General Public License for many +libraries. 
However, the Lesser license provides advantages in certain +special circumstances. + + For example, on rare occasions, there may be a special need to +encourage the widest possible use of a certain library, so that it becomes +a de-facto standard. To achieve this, non-free programs must be +allowed to use the library. A more frequent case is that a free +library does the same job as widely used non-free libraries. In this +case, there is little to gain by limiting the free library to free +software only, so we use the Lesser General Public License. + + In other cases, permission to use a particular library in non-free +programs enables a greater number of people to use a large body of +free software. For example, permission to use the GNU C Library in +non-free programs enables many more people to use the whole GNU +operating system, as well as its variant, the GNU/Linux operating +system. + + Although the Lesser General Public License is Less protective of the +users' freedom, it does ensure that the user of a program that is +linked with the Library has the freedom and the wherewithal to run +that program using a modified version of the Library. + + The precise terms and conditions for copying, distribution and +modification follow. Pay close attention to the difference between a +"work based on the library" and a "work that uses the library". The +former contains code derived from the library, whereas the latter must +be combined with the library in order to run. + + GNU LESSER GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License Agreement applies to any software library or other +program which contains a notice placed by the copyright holder or +other authorized party saying it may be distributed under the terms of +this Lesser General Public License (also called "this License"). +Each licensee is addressed as "you". 
+ + A "library" means a collection of software functions and/or data +prepared so as to be conveniently linked with application programs +(which use some of those functions and data) to form executables. + + The "Library", below, refers to any such software library or work +which has been distributed under these terms. A "work based on the +Library" means either the Library or any derivative work under +copyright law: that is to say, a work containing the Library or a +portion of it, either verbatim or with modifications and/or translated +straightforwardly into another language. (Hereinafter, translation is +included without limitation in the term "modification".) + + "Source code" for a work means the preferred form of the work for +making modifications to it. For a library, complete source code means +all the source code for all modules it contains, plus any associated +interface definition files, plus the scripts used to control compilation +and installation of the library. + + Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running a program using the Library is not restricted, and output from +such a program is covered only if its contents constitute a work based +on the Library (independent of the use of the Library in a tool for +writing it). Whether that is true depends on what the Library does +and what the program that uses the Library does. + + 1. You may copy and distribute verbatim copies of the Library's +complete source code as you receive it, in any medium, provided that +you conspicuously and appropriately publish on each copy an +appropriate copyright notice and disclaimer of warranty; keep intact +all the notices that refer to this License and to the absence of any +warranty; and distribute a copy of this License along with the +Library. 
+ + You may charge a fee for the physical act of transferring a copy, +and you may at your option offer warranty protection in exchange for a +fee. + + 2. You may modify your copy or copies of the Library or any portion +of it, thus forming a work based on the Library, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices + stating that you changed the files and the date of any change. + + c) You must cause the whole of the work to be licensed at no + charge to all third parties under the terms of this License. + + d) If a facility in the modified Library refers to a function or a + table of data to be supplied by an application program that uses + the facility, other than as an argument passed when the facility + is invoked, then you must make a good faith effort to ensure that, + in the event an application does not supply such function or + table, the facility still operates, and performs whatever part of + its purpose remains meaningful. + + (For example, a function in a library to compute square roots has + a purpose that is entirely well-defined independent of the + application. Therefore, Subsection 2d requires that any + application-supplied function or table used by this function must + be optional: if the application does not supply it, the square + root function must still compute square roots.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Library, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. 
But when you +distribute the same sections as part of a whole which is a work based +on the Library, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote +it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Library. + +In addition, mere aggregation of another work not based on the Library +with the Library (or with a work based on the Library) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. You may opt to apply the terms of the ordinary GNU General Public +License instead of this License to a given copy of the Library. To do +this, you must alter all the notices that refer to this License, so +that they refer to the ordinary GNU General Public License, version 2, +instead of to this License. (If a newer version than version 2 of the +ordinary GNU General Public License has appeared, then you can specify +that version instead if you wish.) Do not make any other change in +these notices. + + Once this change is made in a given copy, it is irreversible for +that copy, so the ordinary GNU General Public License applies to all +subsequent copies and derivative works made from that copy. + + This option is useful when you wish to copy part of the code of +the Library into a program that is not a library. + + 4. 
You may copy and distribute the Library (or a portion or +derivative of it, under Section 2) in object code or executable form +under the terms of Sections 1 and 2 above provided that you accompany +it with the complete corresponding machine-readable source code, which +must be distributed under the terms of Sections 1 and 2 above on a +medium customarily used for software interchange. + + If distribution of object code is made by offering access to copy +from a designated place, then offering equivalent access to copy the +source code from the same place satisfies the requirement to +distribute the source code, even though third parties are not +compelled to copy the source along with the object code. + + 5. A program that contains no derivative of any portion of the +Library, but is designed to work with the Library by being compiled or +linked with it, is called a "work that uses the Library". Such a +work, in isolation, is not a derivative work of the Library, and +therefore falls outside the scope of this License. + + However, linking a "work that uses the Library" with the Library +creates an executable that is a derivative of the Library (because it +contains portions of the Library), rather than a "work that uses the +library". The executable is therefore covered by this License. +Section 6 states terms for distribution of such executables. + + When a "work that uses the Library" uses material from a header file +that is part of the Library, the object code for the work may be a +derivative work of the Library even though the source code is not. +Whether this is true is especially significant if the work can be +linked without the Library, or if the work is itself a library. The +threshold for this to be true is not precisely defined by law. 
+ + If such an object file uses only numerical parameters, data +structure layouts and accessors, and small macros and small inline +functions (ten lines or less in length), then the use of the object +file is unrestricted, regardless of whether it is legally a derivative +work. (Executables containing this object code plus portions of the +Library will still fall under Section 6.) + + Otherwise, if the work is a derivative of the Library, you may +distribute the object code for the work under the terms of Section 6. +Any executables containing that work also fall under Section 6, +whether or not they are linked directly with the Library itself. + + 6. As an exception to the Sections above, you may also combine or +link a "work that uses the Library" with the Library to produce a +work containing portions of the Library, and distribute that work +under terms of your choice, provided that the terms permit +modification of the work for the customer's own use and reverse +engineering for debugging such modifications. + + You must give prominent notice with each copy of the work that the +Library is used in it and that the Library and its use are covered by +this License. You must supply a copy of this License. If the work +during execution displays copyright notices, you must include the +copyright notice for the Library among them, as well as a reference +directing the user to the copy of this License. Also, you must do one +of these things: + + a) Accompany the work with the complete corresponding + machine-readable source code for the Library including whatever + changes were used in the work (which must be distributed under + Sections 1 and 2 above); and, if the work is an executable linked + with the Library, with the complete machine-readable "work that + uses the Library", as object code and/or source code, so that the + user can modify the Library and then relink to produce a modified + executable containing the modified Library. 
(It is understood + that the user who changes the contents of definitions files in the + Library will not necessarily be able to recompile the application + to use the modified definitions.) + + b) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (1) uses at run time a + copy of the library already present on the user's computer system, + rather than copying library functions into the executable, and (2) + will operate properly with a modified version of the library, if + the user installs one, as long as the modified version is + interface-compatible with the version that the work was made with. + + c) Accompany the work with a written offer, valid for at + least three years, to give the same user the materials + specified in Subsection 6a, above, for a charge no more + than the cost of performing this distribution. + + d) If distribution of the work is made by offering access to copy + from a designated place, offer equivalent access to copy the above + specified materials from the same place. + + e) Verify that the user has already received a copy of these + materials or that you have already sent this user a copy. + + For an executable, the required form of the "work that uses the +Library" must include any data and utility programs needed for +reproducing the executable from it. However, as a special exception, +the materials to be distributed need not include anything that is +normally distributed (in either source or binary form) with the major +components (compiler, kernel, and so on) of the operating system on +which the executable runs, unless that component itself accompanies +the executable. + + It may happen that this requirement contradicts the license +restrictions of other proprietary libraries that do not normally +accompany the operating system. Such a contradiction means you cannot +use both them and the Library together in an executable that you +distribute. + + 7. 
You may place library facilities that are a work based on the +Library side-by-side in a single library together with other library +facilities not covered by this License, and distribute such a combined +library, provided that the separate distribution of the work based on +the Library and of the other library facilities is otherwise +permitted, and provided that you do these two things: + + a) Accompany the combined library with a copy of the same work + based on the Library, uncombined with any other library + facilities. This must be distributed under the terms of the + Sections above. + + b) Give prominent notice with the combined library of the fact + that part of it is a work based on the Library, and explaining + where to find the accompanying uncombined form of the same work. + + 8. You may not copy, modify, sublicense, link with, or distribute +the Library except as expressly provided under this License. Any +attempt otherwise to copy, modify, sublicense, link with, or +distribute the Library is void, and will automatically terminate your +rights under this License. However, parties who have received copies, +or rights, from you under this License will not have their licenses +terminated so long as such parties remain in full compliance. + + 9. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Library or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Library (or any work based on the +Library), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Library or works based on it. + + 10. 
Each time you redistribute the Library (or any work based on the +Library), the recipient automatically receives a license from the +original licensor to copy, distribute, link with or modify the Library +subject to these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties with +this License. + + 11. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Library at all. For example, if a patent +license would not permit royalty-free redistribution of the Library by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Library. + +If any portion of this section is held invalid or unenforceable under any +particular circumstance, the balance of the section is intended to apply, +and the section as a whole is intended to apply in other circumstances. + +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system which is +implemented by public license practices. 
Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 12. If the distribution and/or use of the Library is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Library under this License may add +an explicit geographical distribution limitation excluding those countries, +so that distribution is permitted only in or among countries not thus +excluded. In such case, this License incorporates the limitation as if +written in the body of this License. + + 13. The Free Software Foundation may publish revised and/or new +versions of the Lesser General Public License from time to time. +Such new versions will be similar in spirit to the present version, +but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Library +specifies a version number of this License which applies to it and +"any later version", you have the option of following the terms and +conditions either of that version or of any later version published by +the Free Software Foundation. If the Library does not specify a +license version number, you may choose any version ever published by +the Free Software Foundation. + + 14. If you wish to incorporate parts of the Library into other free +programs whose distribution conditions are incompatible with these, +write to the author to ask for permission. For software which is +copyrighted by the Free Software Foundation, write to the Free +Software Foundation; we sometimes make exceptions for this. 
Our +decision will be guided by the two goals of preserving the free status +of all derivatives of our free software and of promoting the sharing +and reuse of software generally. + + NO WARRANTY + + 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO +WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. +EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR +OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY +KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE +LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME +THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN +WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY +AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU +FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR +CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE +LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING +RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A +FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF +SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH +DAMAGES. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Libraries + + If you develop a new library, and you want it to be of the greatest +possible use to the public, we recommend making it free software that +everyone can redistribute and change. You can do so by permitting +redistribution under these terms (or, alternatively, under the terms of the +ordinary General Public License). + + To apply these terms, attach the following notices to the library. 
It is +safest to attach them to the start of each source file to most effectively +convey the exclusion of warranty; and each file should have at least the +"copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This library is free software; you can redistribute it and/or + modify it under the terms of the GNU Lesser General Public + License as published by the Free Software Foundation; either + version 2.1 of the License, or (at your option) any later version. + + This library is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + Lesser General Public License for more details. + + You should have received a copy of the GNU Lesser General Public + License along with this library; if not, write to the Free Software + Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 + USA + +Also add information on how to contact you by electronic and paper mail. + +You should also get your employer (if you work as a programmer) or your +school, if any, to sign a "copyright disclaimer" for the library, if +necessary. Here is a sample; alter the names: + + Yoyodyne, Inc., hereby disclaims all copyright interest in the + library `Frob' (a library for tweaking knobs) written by James Random + Hacker. + + , 1 April 1990 + Ty Coon, President of Vice + +That's all there is to it! 
diff --git a/pythonFiles/lib/jedilsp/docstring_to_markdown-0.9.dist-info/METADATA b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.9.dist-info/METADATA new file mode 100644 index 0000000000000..48fbff7dfa0e4 --- /dev/null +++ b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.9.dist-info/METADATA @@ -0,0 +1,64 @@ +Metadata-Version: 2.1 +Name: docstring-to-markdown +Version: 0.9 +Summary: On the fly conversion of Python docstrings to markdown +Home-page: UNKNOWN +Author: Michał Krassowski +Author-email: krassowski.michal+pypi@gmail.com +License: LGPL-2.1 +Project-URL: Bug Tracker, https://github.com/krassowski/docstring-to-markdown/issues +Project-URL: Source Code, https://github.com/krassowski/docstring-to-markdown +Keywords: Docstring,conversion,markdown +Platform: UNKNOWN +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+) +Classifier: Programming Language :: Python +Requires-Python: >=3.6 +Description-Content-Type: text/markdown + +# docstring-to-markdown + +[![tests](https://github.com/krassowski/docstring-to-markdown/workflows/tests/badge.svg)](https://github.com/krassowski/docstring-to-markdown/actions?query=workflow%3A%22tests%22) +![CodeQL](https://github.com/krassowski/docstring-to-markdown/workflows/CodeQL/badge.svg) +[![pypi-version](https://img.shields.io/pypi/v/docstring-to-markdown.svg)](https://python.org/pypi/docstring-to-markdown) + +On the fly conversion of Python docstrings to markdown + +- Python 3.6+ +- currently can recognise reStructuredText and convert multiple of its features to Markdown +- in the future will be able to convert Google docstrings too + +### Installation + +```bash +pip install docstring-to-markdown +``` + + +### Example + +Convert reStructuredText: + +```python +>>> import docstring_to_markdown +>>> docstring_to_markdown.convert(':math:`\\sum`') +'$\\sum$' +``` + +When given the format cannot be recognised an exception will be raised: + 
+```python +>>> docstring_to_markdown.convert('\\sum') +Traceback (most recent call last): + raise UnknownFormatError() +docstring_to_markdown.UnknownFormatError +``` + +### Development + +```bash +pip install -e . +pytest +``` + + diff --git a/pythonFiles/lib/jedilsp/docstring_to_markdown-0.9.dist-info/RECORD b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.9.dist-info/RECORD new file mode 100644 index 0000000000000..968c09af4b841 --- /dev/null +++ b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.9.dist-info/RECORD @@ -0,0 +1,12 @@ +docstring_to_markdown-0.9.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +docstring_to_markdown-0.9.dist-info/LICENSE,sha256=IMF9i4xIpgCADf0U-V1cuf9HBmqWQd3qtI3FSuyW4zE,26526 +docstring_to_markdown-0.9.dist-info/METADATA,sha256=mNKcTnWj_2PANURywQH7oG5yc2Pt7LFf6JpnVKkTMp4,1856 +docstring_to_markdown-0.9.dist-info/RECORD,, +docstring_to_markdown-0.9.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +docstring_to_markdown-0.9.dist-info/WHEEL,sha256=OqRkF0eY5GHssMorFjlbTIq072vpHpF60fIQA6lS9xA,92 +docstring_to_markdown-0.9.dist-info/top_level.txt,sha256=_af5482lGo-DbcT_gtcbEzVnIH6VqsMvN7O2CgBvYNA,22 +docstring_to_markdown/__init__.py,sha256=auUXaxeCO2P0Y-sDvud4vfVUxfYh2exzcM9N-WnTYoQ,263 +docstring_to_markdown/__pycache__/__init__.cpython-39.pyc,, +docstring_to_markdown/__pycache__/rst.cpython-39.pyc,, +docstring_to_markdown/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +docstring_to_markdown/rst.py,sha256=NQZXQpI1qG4OiNz-2UuLO3eLWcfhbbD79CT4MOAMl9Q,20206 diff --git a/pythonFiles/lib/jedilsp/docstring_to_markdown-0.9.dist-info/REQUESTED b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.9.dist-info/REQUESTED new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/pythonFiles/lib/jedilsp/docstring_to_markdown-0.9.dist-info/WHEEL b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.9.dist-info/WHEEL new file mode 100644 index 0000000000000..385faab0525cc --- /dev/null 
+++ b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.9.dist-info/WHEEL @@ -0,0 +1,5 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.36.2) +Root-Is-Purelib: true +Tag: py3-none-any + diff --git a/pythonFiles/lib/jedilsp/docstring_to_markdown-0.9.dist-info/top_level.txt b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.9.dist-info/top_level.txt new file mode 100644 index 0000000000000..f41a06f5ad456 --- /dev/null +++ b/pythonFiles/lib/jedilsp/docstring_to_markdown-0.9.dist-info/top_level.txt @@ -0,0 +1 @@ +docstring_to_markdown diff --git a/pythonFiles/lib/jedilsp/docstring_to_markdown/__init__.py b/pythonFiles/lib/jedilsp/docstring_to_markdown/__init__.py new file mode 100644 index 0000000000000..583f87a8c6540 --- /dev/null +++ b/pythonFiles/lib/jedilsp/docstring_to_markdown/__init__.py @@ -0,0 +1,13 @@ +from .rst import looks_like_rst, rst_to_markdown + +__version__ = "0.11" + + +class UnknownFormatError(Exception): + pass + + +def convert(docstring: str) -> str: + if looks_like_rst(docstring): + return rst_to_markdown(docstring) + raise UnknownFormatError() diff --git a/pythonFiles/lib/jedilsp/docstring_to_markdown/py.typed b/pythonFiles/lib/jedilsp/docstring_to_markdown/py.typed new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/pythonFiles/lib/jedilsp/docstring_to_markdown/rst.py b/pythonFiles/lib/jedilsp/docstring_to_markdown/rst.py new file mode 100644 index 0000000000000..42e2bc9586767 --- /dev/null +++ b/pythonFiles/lib/jedilsp/docstring_to_markdown/rst.py @@ -0,0 +1,827 @@ +from abc import ABC, abstractmethod +from enum import IntEnum, auto +from types import SimpleNamespace +from typing import Union, List, Dict +import re + + +class Directive: + def __init__( + self, pattern: str, replacement: str, + name: Union[str, None] = None, + flags: int = 0 + ): + self.pattern = pattern + self.replacement = replacement + self.name = name + self.flags = flags + + +# 
https://www.sphinx-doc.org/en/master/usage/restructuredtext/domains.html#cross-referencing-python-objects +SPHINX_CROSS_REF_PYTHON = ( + 'mod', + 'func', + 'data', + 'const', + 'class', + 'meth', + 'attr', + 'exc', + 'obj' +) + +# https://www.sphinx-doc.org/en/master/usage/restructuredtext/domains.html#cross-referencing-c-constructs +SPHINX_CROSS_REF_C = ( + 'member', + 'data', + 'func', + 'macro', + 'struct', + 'union', + 'enum', + 'enumerator', + 'type' +) + +# https://www.sphinx-doc.org/en/master/usage/restructuredtext/domains.html#cross-referencing +SPHINX_CROSS_REF_CPP = ( + 'any', + 'class', + 'struct', + 'func', + 'member', + 'var', + 'type', + 'concept', + 'enum', + 'enumerator' +) + +# https://www.sphinx-doc.org/en/master/usage/restructuredtext/domains.html#the-javascript-domain +SPHINX_CROSS_REF_JS = ( + 'mod', + 'func', + 'meth', + 'class', + 'data', + 'attr' +) + +# https://www.sphinx-doc.org/en/master/usage/restructuredtext/domains.html#the-restructuredtext-domain +SPHINX_CROSS_REF_RST = ( + 'dir', + 'role' +) + +# https://www.sphinx-doc.org/en/master/usage/restructuredtext/roles.html +SPHINX_CROSS_REF_OTHER = ( + 'any', + # https://www.sphinx-doc.org/en/master/usage/restructuredtext/roles.html#cross-referencing-other-items-of-interest + 'envvar', + 'token', + 'keyword', + 'option', + 'term', +) + +SPHINX_PARAM = ( + 'param', + 'parameter', + 'arg', + 'argument', + 'key', + 'keyword' +) + +SPHINX_RULES: List[Directive] = [ + Directive( + pattern=r':c:({}):`\.?(?P[^`]+?)`'.format('|'.join(SPHINX_CROSS_REF_C)), + replacement=r'`\g`' + ), + Directive( + pattern=r':cpp:({}):`\.?(?P[^`]+?)`'.format('|'.join(SPHINX_CROSS_REF_CPP)), + replacement=r'`\g`' + ), + Directive( + pattern=r':js:({}):`\.?(?P[^`]+?)`'.format('|'.join(SPHINX_CROSS_REF_JS)), + replacement=r'`\g`' + ), + Directive( + pattern=r'(:py)?:({}):`\.?(?P[^`]+?)`'.format('|'.join(SPHINX_CROSS_REF_PYTHON)), + replacement=r'`\g`' + ), + Directive( + 
pattern=r'(:rst)?:({}):`\.?(?P[^`]+?)`'.format('|'.join(SPHINX_CROSS_REF_RST)), + replacement=r'`\g`' + ), + Directive( + pattern=r':({}):`\.?(?P[^`]+?)`'.format('|'.join(SPHINX_CROSS_REF_OTHER)), + replacement=r'`\g`' + ), + Directive( + pattern=r'^\s*:({}) (?P\S+) (?P\S+):'.format('|'.join(SPHINX_PARAM)), + replacement=r'- `\g` (`\g`):', + flags=re.MULTILINE + ), + Directive( + pattern=r'^\s*:({}) (?P\S+): (?P.*)(\n|\r\n?):type \2: (?P.*)$'.format('|'.join(SPHINX_PARAM)), + replacement=r'- `\g` (\g): \g', + flags=re.MULTILINE + ), + Directive( + pattern=r'^\s*:({}) (?P\S+):'.format('|'.join(SPHINX_PARAM)), + replacement=r'- `\g`:', + flags=re.MULTILINE + ), + Directive( + pattern=r'^\s*:type (?P\S+):', + replacement=r' . Type: `\g`:', + flags=re.MULTILINE + ), + Directive( + pattern=r'^\s*:(return|returns):', + replacement=r'- returns:', + flags=re.MULTILINE + ), + Directive( + pattern=r'^\s*:rtype: (?P\S+)', + replacement=r'- return type: `\g`', + flags=re.MULTILINE + ), + Directive( + pattern=r'^\s*:(raises|raise|except|exception) (?P\S+):', + replacement=r'- raises `\g`:', + flags=re.MULTILINE + ), +] + + +class Admonition: + def __init__(self, name: str, label: str, icon: str = ''): + self.name = name + self.label = label + self.icon = icon + + @property + def block_markdown(self): + return f'{self.icon} **{self.label}**' + + @property + def inline_markdown(self): + return self.block_markdown + ':' + + +ADMONITIONS = [ + Admonition( + name='caution', + label='Caution', + icon='⚠️ ' + ), + Admonition( + name='attention', + label='Attention', + icon='⚠️ ' + ), + Admonition( + name='danger', + label='Danger', + icon='⚠️ ' + ), + Admonition( + name='hint', + label='Hint', + icon='🛈' + ), + Admonition( + name='important', + label='Important', + icon='⚠️ ' + ), + Admonition( + name='note', + label='Note', + icon='🛈' + ), + Admonition( + name='tip', + label='Tip', + icon='🛈' + ), + Admonition( + name='warning', + label='Warning', + icon='⚠️ ' + ) +] + + 
+ADMONITION_DIRECTIVES: List[Directive] = [ + # https://docutils.sourceforge.io/docs/ref/rst/directives.html#admonitions + Directive( + pattern=rf'\.\. {admonition.name}::', + replacement=admonition.inline_markdown + ) + for admonition in ADMONITIONS +] + + +RST_DIRECTIVES: List[Directive] = [ + Directive( + pattern=r'\.\. versionchanged:: (?P\S+)(?P$|\n)', + replacement=r'*Changed in \g*\g' + ), + Directive( + pattern=r'\.\. versionadded:: (?P\S+)(?P$|\n)', + replacement=r'*Added in \g*\g' + ), + Directive( + pattern=r'\.\. deprecated:: (?P\S+)(?P$|\n)', + replacement=r'*Deprecated since \g*\g' + ), + *ADMONITION_DIRECTIVES, + Directive( + pattern=r'\.\. seealso::(?P.*)(?P$|\n)', + replacement=r'*See also*\g\g' + ), + Directive( + pattern=r':ref:`(?P