summaryrefslogtreecommitdiff
path: root/venv/lib/python3.11/site-packages/msgspec
diff options
context:
space:
mode:
Diffstat (limited to 'venv/lib/python3.11/site-packages/msgspec')
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/__init__.py61
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/__init__.pyi207
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/__pycache__/__init__.cpython-311.pycbin0 -> 1949 bytes
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/__pycache__/_json_schema.cpython-311.pycbin0 -> 23554 bytes
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/__pycache__/_utils.cpython-311.pycbin0 -> 12136 bytes
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/__pycache__/_version.cpython-311.pycbin0 -> 597 bytes
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/__pycache__/inspect.cpython-311.pycbin0 -> 40161 bytes
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/__pycache__/json.cpython-311.pycbin0 -> 519 bytes
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/__pycache__/msgpack.cpython-311.pycbin0 -> 427 bytes
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/__pycache__/structs.cpython-311.pycbin0 -> 4469 bytes
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/__pycache__/toml.cpython-311.pycbin0 -> 7319 bytes
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/__pycache__/yaml.cpython-311.pycbin0 -> 6905 bytes
-rwxr-xr-xvenv/lib/python3.11/site-packages/msgspec/_core.cpython-311-x86_64-linux-gnu.sobin0 -> 405992 bytes
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/_json_schema.py439
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/_utils.py289
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/_version.py21
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/inspect.py1005
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/json.py8
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/json.pyi113
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/msgpack.py7
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/msgpack.pyi103
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/py.typed0
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/structs.py106
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/structs.pyi37
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/toml.py190
-rw-r--r--venv/lib/python3.11/site-packages/msgspec/yaml.py185
26 files changed, 2771 insertions, 0 deletions
diff --git a/venv/lib/python3.11/site-packages/msgspec/__init__.py b/venv/lib/python3.11/site-packages/msgspec/__init__.py
new file mode 100644
index 0000000..2bd99f5
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/__init__.py
@@ -0,0 +1,61 @@
+from ._core import (
+ DecodeError,
+ EncodeError,
+ Field as _Field,
+ Meta,
+ MsgspecError,
+ Raw,
+ Struct,
+ UnsetType,
+ UNSET,
+ NODEFAULT,
+ ValidationError,
+ defstruct,
+ convert,
+ to_builtins,
+)
+
+
def field(*, default=NODEFAULT, default_factory=NODEFAULT, name=None):
    # Thin convenience wrapper so callers never touch the private `_Field`
    # type directly. (`field.__doc__` is copied from `_Field` below.)
    spec = _Field(default=default, default_factory=default_factory, name=name)
    return spec
+
+
def from_builtins(
    obj,
    type,
    *,
    str_keys=False,
    str_values=False,
    builtin_types=None,
    dec_hook=None,
):
    """DEPRECATED: use ``msgspec.convert`` instead"""
    import warnings

    # Fix: emit a proper DeprecationWarning. Without an explicit category,
    # `warnings.warn` defaults to UserWarning, so deprecation filters and
    # `-W error::DeprecationWarning` test setups would not catch this.
    warnings.warn(
        "`msgspec.from_builtins` is deprecated, please use `msgspec.convert` instead",
        DeprecationWarning,
        stacklevel=2,
    )
    # The old `str_values=True` behavior maps onto the new `strict=False` mode.
    return convert(
        obj,
        type,
        strict=not str_values,
        dec_hook=dec_hook,
        builtin_types=builtin_types,
        str_keys=str_keys,
    )
+
+
+field.__doc__ = _Field.__doc__
+
+
+from . import msgpack
+from . import json
+from . import yaml
+from . import toml
+from . import inspect
+from . import structs
+from ._version import get_versions
+
+__version__ = get_versions()["version"]
+del get_versions
diff --git a/venv/lib/python3.11/site-packages/msgspec/__init__.pyi b/venv/lib/python3.11/site-packages/msgspec/__init__.pyi
new file mode 100644
index 0000000..86a1051
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/__init__.pyi
@@ -0,0 +1,207 @@
+import enum
+from typing import (
+ Any,
+ Callable,
+ ClassVar,
+ Dict,
+ Final,
+ Iterable,
+ Literal,
+ Mapping,
+ Optional,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ overload,
+)
+
+from typing_extensions import dataclass_transform
+
+from . import inspect, json, msgpack, structs, toml, yaml
+
# TypeVar used throughout the stubs to tie input types to output types.
T = TypeVar("T")

class UnsetType(enum.Enum):
    # Sentinel enum for "value not provided"; its only member is UNSET.
    UNSET = "UNSET"

UNSET = UnsetType.UNSET

class _NoDefault(enum.Enum):
    # Sentinel enum marking a field with no default value.
    NODEFAULT = "NODEFAULT"

NODEFAULT = _NoDefault.NODEFAULT

# `field` is declared to return `T` (the default's type) rather than a Field
# object, so type checkers accept `x: int = field(default=1)` in Struct bodies.
@overload
def field(*, default: T, name: Optional[str] = None) -> T: ...
@overload
def field(*, default_factory: Callable[[], T], name: Optional[str] = None) -> T: ...
@overload
def field(*, name: Optional[str] = None) -> Any: ...
@dataclass_transform(field_specifiers=(field,))
class Struct:
    """Typing stub for the C-implemented ``msgspec.Struct`` base class."""

    # Field names, in definition order.
    __struct_fields__: ClassVar[Tuple[str, ...]]
    # Configuration object describing the struct's options.
    __struct_config__: ClassVar[structs.StructConfig]
    # Enables structural pattern matching over the fields.
    __match_args__: ClassVar[Tuple[str, ...]]
    # A default __init__ so that Structs with unknown field types (say
    # constructed by `defstruct`) won't error on every call to `__init__`
    def __init__(self, *args: Any, **kwargs: Any) -> None: ...
    # Struct options are passed as class keyword arguments, e.g.
    # `class Point(Struct, frozen=True): ...`
    def __init_subclass__(
        cls,
        tag: Union[None, bool, str, int, Callable[[str], Union[str, int]]] = None,
        tag_field: Union[None, str] = None,
        rename: Union[
            None,
            Literal["lower", "upper", "camel", "pascal", "kebab"],
            Callable[[str], Optional[str]],
            Mapping[str, str],
        ] = None,
        omit_defaults: bool = False,
        forbid_unknown_fields: bool = False,
        frozen: bool = False,
        eq: bool = True,
        order: bool = False,
        kw_only: bool = False,
        repr_omit_defaults: bool = False,
        array_like: bool = False,
        gc: bool = True,
        weakref: bool = False,
        dict: bool = False,
        cache_hash: bool = False,
    ) -> None: ...
    # Support for the `rich` library's pretty-repr protocol.
    def __rich_repr__(
        self,
    ) -> Iterable[Union[Any, Tuple[Any], Tuple[str, Any], Tuple[str, Any, Any]]]: ...
+
# Dynamically create a new Struct subclass (the runtime analog of defining a
# Struct class body). Keyword options mirror `Struct.__init_subclass__`.
def defstruct(
    name: str,
    fields: Iterable[Union[str, Tuple[str, type], Tuple[str, type, Any]]],
    *,
    bases: Optional[Tuple[Type[Struct], ...]] = None,
    module: Optional[str] = None,
    namespace: Optional[Dict[str, Any]] = None,
    tag: Union[None, bool, str, int, Callable[[str], Union[str, int]]] = None,
    tag_field: Union[None, str] = None,
    rename: Union[
        None,
        Literal["lower", "upper", "camel", "pascal", "kebab"],
        Callable[[str], Optional[str]],
        Mapping[str, str],
    ] = None,
    omit_defaults: bool = False,
    forbid_unknown_fields: bool = False,
    frozen: bool = False,
    eq: bool = True,
    order: bool = False,
    kw_only: bool = False,
    repr_omit_defaults: bool = False,
    array_like: bool = False,
    gc: bool = True,
    weakref: bool = False,
    dict: bool = False,
    cache_hash: bool = False,
) -> Type[Struct]: ...
+
# Lie and say `Raw` is a subclass of `bytes`, so mypy will accept it in most
# places where an object that implements the buffer protocol is valid
class Raw(bytes):
    @overload
    def __new__(cls) -> "Raw": ...
    @overload
    def __new__(cls, msg: Union[bytes, str]) -> "Raw": ...
    # Return a copy of this Raw object.
    def copy(self) -> "Raw": ...
+
class Meta:
    """Typing stub for ``msgspec.Meta``, which attaches constraints and schema
    metadata to types via ``typing.Annotated``."""

    def __init__(
        self,
        *,
        gt: Union[int, float, None] = None,
        ge: Union[int, float, None] = None,
        lt: Union[int, float, None] = None,
        le: Union[int, float, None] = None,
        multiple_of: Union[int, float, None] = None,
        pattern: Union[str, None] = None,
        min_length: Union[int, None] = None,
        max_length: Union[int, None] = None,
        tz: Union[bool, None] = None,
        title: Union[str, None] = None,
        description: Union[str, None] = None,
        examples: Union[list, None] = None,
        extra_json_schema: Union[dict, None] = None,
        extra: Union[dict, None] = None,
    ): ...
    # Attributes mirror the constructor arguments and are read-only.
    gt: Final[Union[int, float, None]]
    ge: Final[Union[int, float, None]]
    lt: Final[Union[int, float, None]]
    le: Final[Union[int, float, None]]
    multiple_of: Final[Union[int, float, None]]
    pattern: Final[Union[str, None]]
    min_length: Final[Union[int, None]]
    max_length: Final[Union[int, None]]
    # Fix: was annotated `Final[Union[int, None]]`, inconsistent with the
    # `tz: Union[bool, None]` constructor parameter above.
    tz: Final[Union[bool, None]]
    title: Final[Union[str, None]]
    description: Final[Union[str, None]]
    examples: Final[Union[list, None]]
    extra_json_schema: Final[Union[dict, None]]
    extra: Final[Union[dict, None]]
    def __rich_repr__(self) -> Iterable[Tuple[str, Any]]: ...
+
# Convert an object into a representation using only simple builtin types.
def to_builtins(
    obj: Any,
    *,
    str_keys: bool = False,
    builtin_types: Union[Iterable[type], None] = None,
    enc_hook: Optional[Callable[[Any], Any]] = None,
    order: Literal[None, "deterministic", "sorted"] = None,
) -> Any: ...
# `convert` is overloaded: given a concrete `Type[T]` the result is typed `T`;
# for any other annotation form it falls back to `Any`.
@overload
def convert(
    obj: Any,
    type: Type[T],
    *,
    strict: bool = True,
    from_attributes: bool = False,
    dec_hook: Optional[Callable[[type, Any], Any]] = None,
    builtin_types: Union[Iterable[type], None] = None,
    str_keys: bool = False,
) -> T: ...
@overload
def convert(
    obj: Any,
    type: Any,
    *,
    strict: bool = True,
    from_attributes: bool = False,
    dec_hook: Optional[Callable[[type, Any], Any]] = None,
    builtin_types: Union[Iterable[type], None] = None,
    str_keys: bool = False,
) -> Any: ...
+
# TODO: deprecated
# Deprecated alias of `convert` (see `msgspec/__init__.py`); kept in the stub
# so existing callers still type-check.
@overload
def from_builtins(
    obj: Any,
    type: Type[T],
    *,
    str_keys: bool = False,
    str_values: bool = False,
    builtin_types: Union[Iterable[type], None] = None,
    dec_hook: Optional[Callable[[type, Any], Any]] = None,
) -> T: ...
@overload
def from_builtins(
    obj: Any,
    type: Any,
    *,
    str_keys: bool = False,
    str_values: bool = False,
    builtin_types: Union[Iterable[type], None] = None,
    dec_hook: Optional[Callable[[type, Any], Any]] = None,
) -> Any: ...
+
# Exception hierarchy: every msgspec error derives from MsgspecError, and
# validation failures are a subtype of decode failures.
class MsgspecError(Exception): ...
class EncodeError(MsgspecError): ...
class DecodeError(MsgspecError): ...
class ValidationError(DecodeError): ...

# Package version string (set at import time from `_version.py`).
__version__: str
diff --git a/venv/lib/python3.11/site-packages/msgspec/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/msgspec/__pycache__/__init__.cpython-311.pyc
new file mode 100644
index 0000000..d62691c
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/__pycache__/__init__.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/msgspec/__pycache__/_json_schema.cpython-311.pyc b/venv/lib/python3.11/site-packages/msgspec/__pycache__/_json_schema.cpython-311.pyc
new file mode 100644
index 0000000..f38e68f
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/__pycache__/_json_schema.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/msgspec/__pycache__/_utils.cpython-311.pyc b/venv/lib/python3.11/site-packages/msgspec/__pycache__/_utils.cpython-311.pyc
new file mode 100644
index 0000000..9baa06a
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/__pycache__/_utils.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/msgspec/__pycache__/_version.cpython-311.pyc b/venv/lib/python3.11/site-packages/msgspec/__pycache__/_version.cpython-311.pyc
new file mode 100644
index 0000000..b03453d
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/__pycache__/_version.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/msgspec/__pycache__/inspect.cpython-311.pyc b/venv/lib/python3.11/site-packages/msgspec/__pycache__/inspect.cpython-311.pyc
new file mode 100644
index 0000000..442c609
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/__pycache__/inspect.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/msgspec/__pycache__/json.cpython-311.pyc b/venv/lib/python3.11/site-packages/msgspec/__pycache__/json.cpython-311.pyc
new file mode 100644
index 0000000..5f83634
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/__pycache__/json.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/msgspec/__pycache__/msgpack.cpython-311.pyc b/venv/lib/python3.11/site-packages/msgspec/__pycache__/msgpack.cpython-311.pyc
new file mode 100644
index 0000000..3e39d90
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/__pycache__/msgpack.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/msgspec/__pycache__/structs.cpython-311.pyc b/venv/lib/python3.11/site-packages/msgspec/__pycache__/structs.cpython-311.pyc
new file mode 100644
index 0000000..4c84afc
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/__pycache__/structs.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/msgspec/__pycache__/toml.cpython-311.pyc b/venv/lib/python3.11/site-packages/msgspec/__pycache__/toml.cpython-311.pyc
new file mode 100644
index 0000000..7e3964a
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/__pycache__/toml.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/msgspec/__pycache__/yaml.cpython-311.pyc b/venv/lib/python3.11/site-packages/msgspec/__pycache__/yaml.cpython-311.pyc
new file mode 100644
index 0000000..5538dd7
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/__pycache__/yaml.cpython-311.pyc
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/msgspec/_core.cpython-311-x86_64-linux-gnu.so b/venv/lib/python3.11/site-packages/msgspec/_core.cpython-311-x86_64-linux-gnu.so
new file mode 100755
index 0000000..5f5fe16
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/_core.cpython-311-x86_64-linux-gnu.so
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/msgspec/_json_schema.py b/venv/lib/python3.11/site-packages/msgspec/_json_schema.py
new file mode 100644
index 0000000..be506e3
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/_json_schema.py
@@ -0,0 +1,439 @@
+from __future__ import annotations
+
+import re
+import textwrap
+from collections.abc import Iterable
+from typing import Any, Optional, Callable
+
+from . import inspect as mi, to_builtins
+
+__all__ = ("schema", "schema_components")
+
+
def schema(
    type: Any, *, schema_hook: Optional[Callable[[type], dict[str, Any]]] = None
) -> dict[str, Any]:
    """Generate a JSON Schema for a given type.

    Any schemas for (potentially) shared components are extracted and stored in
    a top-level ``"$defs"`` field.

    If you want to generate schemas for multiple types, or to have more control
    over the generated schema you may want to use ``schema_components`` instead.

    Parameters
    ----------
    type : type
        The type to generate the schema for.
    schema_hook : callable, optional
        An optional callback to use for generating JSON schemas of custom
        types. Will be called with the custom type, and should return a dict
        representation of the JSON schema for that type.

    Returns
    -------
    schema : dict
        The generated JSON Schema.

    See Also
    --------
    schema_components
    """
    # Delegate to the multi-type API with a single type, then inline any
    # shared component definitions under "$defs".
    (out,), components = schema_components((type,), schema_hook=schema_hook)
    if components:
        out["$defs"] = components
    return out
+
+
def schema_components(
    types: Iterable[Any],
    *,
    schema_hook: Optional[Callable[[type], dict[str, Any]]] = None,
    ref_template: str = "#/$defs/{name}",
) -> tuple[tuple[dict[str, Any], ...], dict[str, Any]]:
    """Generate JSON Schemas for one or more types.

    Any schemas for (potentially) shared components are extracted and returned
    in a separate ``components`` dict.

    Parameters
    ----------
    types : Iterable[type]
        An iterable of one or more types to generate schemas for.
    schema_hook : callable, optional
        An optional callback to use for generating JSON schemas of custom
        types. Will be called with the custom type, and should return a dict
        representation of the JSON schema for that type.
    ref_template : str, optional
        A template to use when generating ``"$ref"`` fields. This template is
        formatted with the type name as ``template.format(name=name)``. This
        can be useful if you intend to store the ``components`` mapping
        somewhere other than a top-level ``"$defs"`` field. For example, you
        might use ``ref_template="#/components/{name}"`` if generating an
        OpenAPI schema.

    Returns
    -------
    schemas : tuple[dict]
        A tuple of JSON Schemas, one for each type in ``types``.
    components : dict
        A mapping of name to schema for any shared components used by
        ``schemas``.

    See Also
    --------
    schema
    """
    # 1. Inspect all requested types into `msgspec.inspect` type nodes.
    type_infos = mi.multi_type_info(types)

    # 2. Collect every nameable sub-component (structs, enums, ...).
    component_types = _collect_component_types(type_infos)

    # 3. Assign each component a unique name (expanding on conflicts).
    name_map = _build_name_map(component_types)

    gen = _SchemaGenerator(name_map, schema_hook, ref_template)

    # 4. Top-level schemas reference components via "$ref"; the component
    #    definitions themselves are generated with check_ref=False so they
    #    expand in place rather than referencing themselves.
    schemas = tuple(gen.to_schema(t) for t in type_infos)

    components = {
        name_map[cls]: gen.to_schema(t, False) for cls, t in component_types.items()
    }
    return schemas, components
+
+
def _collect_component_types(type_infos: Iterable[mi.Type]) -> dict[Any, mi.Type]:
    """Find all types in the type tree that are "nameable" and worthy of being
    extracted out into a shared top-level components mapping.

    Currently this looks for Struct, Dataclass, NamedTuple, TypedDict, and Enum
    types.
    """
    components = {}

    def collect(t):
        # Note: the isinstance dispatch order matters — the field-bearing
        # component types are checked before the generic container types.
        if isinstance(
            t, (mi.StructType, mi.TypedDictType, mi.DataclassType, mi.NamedTupleType)
        ):
            # Guard against infinite recursion on self-referential types.
            if t.cls not in components:
                components[t.cls] = t
                for f in t.fields:
                    collect(f.type)
        elif isinstance(t, mi.EnumType):
            components[t.cls] = t
        elif isinstance(t, mi.Metadata):
            # Unwrap Annotated metadata and recurse on the inner type.
            collect(t.type)
        elif isinstance(t, mi.CollectionType):
            collect(t.item_type)
        elif isinstance(t, mi.TupleType):
            for st in t.item_types:
                collect(st)
        elif isinstance(t, mi.DictType):
            collect(t.key_type)
            collect(t.value_type)
        elif isinstance(t, mi.UnionType):
            for st in t.types:
                collect(st)

    for t in type_infos:
        collect(t)

    return components
+
+
def _type_repr(obj):
    """Short display form of a type argument: the class name for classes,
    ``repr`` for anything else (e.g. literal values)."""
    if isinstance(obj, type):
        return obj.__name__
    return repr(obj)
+
+
def _get_class_name(cls: Any) -> str:
    """Render a readable class name; parametrized generics include their
    arguments, e.g. ``list[int]``."""
    if hasattr(cls, "__origin__"):
        # Parametrized generic: "origin[arg1, arg2, ...]".
        rendered_args = ", ".join(
            (a.__name__ if isinstance(a, type) else repr(a)) for a in cls.__args__
        )
        return f"{cls.__origin__.__name__}[{rendered_args}]"
    return cls.__name__
+
+
def _get_doc(t: mi.Type) -> str:
    """Extract a docstring for a component type, filtering out the
    auto-generated docstrings Python adds for enums/namedtuples/dataclasses."""
    assert hasattr(t, "cls")
    # For parametrized generics, the docstring lives on the origin class.
    cls = getattr(t.cls, "__origin__", t.cls)
    doc = getattr(cls, "__doc__", "")
    if not doc:
        return ""
    doc = textwrap.dedent(doc).strip("\r\n")
    if isinstance(t, mi.EnumType):
        # Default docstring added by the enum machinery — not user-written.
        if doc == "An enumeration.":
            return ""
    elif isinstance(t, (mi.NamedTupleType, mi.DataclassType)):
        # Auto-generated signature docstring, e.g. "Point(x, y)".
        if doc.startswith(f"{cls.__name__}(") and doc.endswith(")"):
            return ""
    return doc
+
+
def _build_name_map(component_types: dict[Any, mi.Type]) -> dict[Any, str]:
    """Map each nameable component class to a unique generated name.

    The name is normally the normalized class name; when two classes would
    share a name, both fall back to their full ``module.qualname`` path.
    """

    def _norm(name):
        return re.sub(r"[^a-zA-Z0-9.\-_]", "_", name)

    def _fullname(cls):
        return _norm(f"{cls.__module__}.{cls.__qualname__}")

    clashed = set()
    taken: dict[str, Any] = {}

    for cls in component_types:
        if hasattr(cls, "__origin__"):
            # Parametrized generic: render as "origin[arg1, arg2, ...]".
            rendered = ", ".join(
                (a.__name__ if isinstance(a, type) else repr(a))
                for a in cls.__args__
            )
            short = _norm(f"{cls.__origin__.__name__}[{rendered}]")
        else:
            short = _norm(cls.__name__)

        if short in taken:
            # First occupant of this short name also moves to its full path.
            previous = taken.pop(short)
            clashed.add(short)
            taken[_fullname(previous)] = previous

        if short in clashed:
            taken[_fullname(cls)] = cls
        else:
            taken[short] = cls

    # Invert: class -> generated name.
    return {cls: name for name, cls in taken.items()}
+
+
class _SchemaGenerator:
    """Stateful converter from ``msgspec.inspect`` type nodes to JSON Schemas."""

    def __init__(
        self,
        name_map: dict[Any, str],
        schema_hook: Optional[Callable[[type], dict[str, Any]]] = None,
        ref_template: str = "#/$defs/{name}",
    ):
        # Mapping of nameable component classes -> generated schema names.
        self.name_map = name_map
        # Optional callback used to generate schemas for custom types.
        self.schema_hook = schema_hook
        # Template used to format "$ref" pointers into the components mapping.
        self.ref_template = ref_template

    def to_schema(self, t: mi.Type, check_ref: bool = True) -> dict[str, Any]:
        """Converts a Type to a json-schema."""
        schema: dict[str, Any] = {}

        # Unwrap Annotated metadata layers, merging any extra_json_schema.
        while isinstance(t, mi.Metadata):
            schema = mi._merge_json(schema, t.extra_json_schema)
            t = t.type

        # Nameable components are emitted as "$ref" pointers, except when we
        # are generating the component's own definition (check_ref=False).
        if check_ref and hasattr(t, "cls"):
            if name := self.name_map.get(t.cls):
                schema["$ref"] = self.ref_template.format(name=name)
                return schema

        if isinstance(t, (mi.AnyType, mi.RawType)):
            # Unconstrained — the empty schema accepts anything.
            pass
        elif isinstance(t, mi.NoneType):
            schema["type"] = "null"
        elif isinstance(t, mi.BoolType):
            schema["type"] = "boolean"
        elif isinstance(t, (mi.IntType, mi.FloatType)):
            schema["type"] = "integer" if isinstance(t, mi.IntType) else "number"
            if t.ge is not None:
                schema["minimum"] = t.ge
            if t.gt is not None:
                schema["exclusiveMinimum"] = t.gt
            if t.le is not None:
                schema["maximum"] = t.le
            if t.lt is not None:
                schema["exclusiveMaximum"] = t.lt
            if t.multiple_of is not None:
                schema["multipleOf"] = t.multiple_of
        elif isinstance(t, mi.StrType):
            schema["type"] = "string"
            if t.max_length is not None:
                schema["maxLength"] = t.max_length
            if t.min_length is not None:
                schema["minLength"] = t.min_length
            if t.pattern is not None:
                schema["pattern"] = t.pattern
        elif isinstance(t, (mi.BytesType, mi.ByteArrayType, mi.MemoryViewType)):
            schema["type"] = "string"
            schema["contentEncoding"] = "base64"
            # Length limits are scaled by the base64 expansion factor
            # (3 raw bytes -> 4 encoded characters, rounded up).
            if t.max_length is not None:
                schema["maxLength"] = 4 * ((t.max_length + 2) // 3)
            if t.min_length is not None:
                schema["minLength"] = 4 * ((t.min_length + 2) // 3)
        elif isinstance(t, mi.DateTimeType):
            schema["type"] = "string"
            # "date-time" requires a timezone; only emitted when tz is
            # explicitly required.
            if t.tz is True:
                schema["format"] = "date-time"
        elif isinstance(t, mi.TimeType):
            schema["type"] = "string"
            if t.tz is True:
                schema["format"] = "time"
            elif t.tz is False:
                schema["format"] = "partial-time"
        elif isinstance(t, mi.DateType):
            schema["type"] = "string"
            schema["format"] = "date"
        elif isinstance(t, mi.TimeDeltaType):
            schema["type"] = "string"
            schema["format"] = "duration"
        elif isinstance(t, mi.UUIDType):
            schema["type"] = "string"
            schema["format"] = "uuid"
        elif isinstance(t, mi.DecimalType):
            schema["type"] = "string"
            schema["format"] = "decimal"
        elif isinstance(t, mi.CollectionType):
            schema["type"] = "array"
            if not isinstance(t.item_type, mi.AnyType):
                schema["items"] = self.to_schema(t.item_type)
            if t.max_length is not None:
                schema["maxItems"] = t.max_length
            if t.min_length is not None:
                schema["minItems"] = t.min_length
        elif isinstance(t, mi.TupleType):
            # Fixed-length heterogeneous tuple: exact length, per-slot schemas.
            schema["type"] = "array"
            schema["minItems"] = schema["maxItems"] = len(t.item_types)
            if t.item_types:
                schema["prefixItems"] = [self.to_schema(i) for i in t.item_types]
                schema["items"] = False
        elif isinstance(t, mi.DictType):
            schema["type"] = "object"
            # If there are restrictions on the keys, specify them as propertyNames
            if isinstance(key_type := t.key_type, mi.StrType):
                property_names: dict[str, Any] = {}
                if key_type.min_length is not None:
                    property_names["minLength"] = key_type.min_length
                if key_type.max_length is not None:
                    property_names["maxLength"] = key_type.max_length
                if key_type.pattern is not None:
                    property_names["pattern"] = key_type.pattern
                if property_names:
                    schema["propertyNames"] = property_names
            if not isinstance(t.value_type, mi.AnyType):
                schema["additionalProperties"] = self.to_schema(t.value_type)
            if t.max_length is not None:
                schema["maxProperties"] = t.max_length
            if t.min_length is not None:
                schema["minProperties"] = t.min_length
        elif isinstance(t, mi.UnionType):
            # Split union members into tagged (non-array_like) structs, which
            # can form an OpenAPI-style discriminated union, and everything else.
            structs = {}
            other = []
            tag_field = None
            for subtype in t.types:
                real_type = subtype
                while isinstance(real_type, mi.Metadata):
                    real_type = real_type.type
                if isinstance(real_type, mi.StructType) and not real_type.array_like:
                    tag_field = real_type.tag_field
                    structs[real_type.tag] = real_type
                else:
                    other.append(subtype)

            options = [self.to_schema(a) for a in other]

            if len(structs) >= 2:
                # Two or more tagged structs: emit a discriminator mapping
                # from tag value to the struct's "$ref".
                mapping = {
                    k: self.ref_template.format(name=self.name_map[v.cls])
                    for k, v in structs.items()
                }
                struct_schema = {
                    "anyOf": [self.to_schema(v) for v in structs.values()],
                    "discriminator": {"propertyName": tag_field, "mapping": mapping},
                }
                if options:
                    options.append(struct_schema)
                    schema["anyOf"] = options
                else:
                    schema.update(struct_schema)
            elif len(structs) == 1:
                # A single struct needs no discriminator.
                _, subtype = structs.popitem()
                options.append(self.to_schema(subtype))
                schema["anyOf"] = options
            else:
                schema["anyOf"] = options
        elif isinstance(t, mi.LiteralType):
            schema["enum"] = sorted(t.values)
        elif isinstance(t, mi.EnumType):
            # setdefault: don't clobber values supplied via extra_json_schema.
            schema.setdefault("title", t.cls.__name__)
            if doc := _get_doc(t):
                schema.setdefault("description", doc)
            schema["enum"] = sorted(e.value for e in t.cls)
        elif isinstance(t, mi.StructType):
            schema.setdefault("title", _get_class_name(t.cls))
            if doc := _get_doc(t):
                schema.setdefault("description", doc)
            required = []
            names = []
            fields = []

            # A tagged struct encodes its tag as an extra required member.
            if t.tag_field is not None:
                required.append(t.tag_field)
                names.append(t.tag_field)
                fields.append({"enum": [t.tag]})

            for field in t.fields:
                field_schema = self.to_schema(field.type)
                if field.required:
                    required.append(field.encode_name)
                elif field.default is not mi.NODEFAULT:
                    field_schema["default"] = to_builtins(field.default, str_keys=True)
                elif field.default_factory in (list, dict, set, bytearray):
                    # Only safe, zero-arg builtin factories are evaluated for
                    # the "default" value.
                    field_schema["default"] = field.default_factory()
                names.append(field.encode_name)
                fields.append(field_schema)

            if t.array_like:
                # array_like structs encode as arrays; count how many trailing
                # fields have defaults (they may be omitted).
                n_trailing_defaults = 0
                for n_trailing_defaults, f in enumerate(reversed(t.fields)):
                    if f.required:
                        break
                schema["type"] = "array"
                schema["prefixItems"] = fields
                schema["minItems"] = len(fields) - n_trailing_defaults
                if t.forbid_unknown_fields:
                    schema["maxItems"] = len(fields)
            else:
                schema["type"] = "object"
                schema["properties"] = dict(zip(names, fields))
                schema["required"] = required
                if t.forbid_unknown_fields:
                    schema["additionalProperties"] = False
        elif isinstance(t, (mi.TypedDictType, mi.DataclassType, mi.NamedTupleType)):
            schema.setdefault("title", _get_class_name(t.cls))
            if doc := _get_doc(t):
                schema.setdefault("description", doc)
            names = []
            fields = []
            required = []
            for field in t.fields:
                field_schema = self.to_schema(field.type)
                if field.required:
                    required.append(field.encode_name)
                elif field.default is not mi.NODEFAULT:
                    field_schema["default"] = to_builtins(field.default, str_keys=True)
                names.append(field.encode_name)
                fields.append(field_schema)
            if isinstance(t, mi.NamedTupleType):
                # NamedTuples encode as arrays of positional items.
                schema["type"] = "array"
                schema["prefixItems"] = fields
                schema["minItems"] = len(required)
                schema["maxItems"] = len(fields)
            else:
                schema["type"] = "object"
                schema["properties"] = dict(zip(names, fields))
                schema["required"] = required
        elif isinstance(t, mi.ExtType):
            raise TypeError("json-schema doesn't support msgpack Ext types")
        elif isinstance(t, mi.CustomType):
            if self.schema_hook:
                try:
                    # Hook output is merged under any Annotated-provided schema.
                    schema = mi._merge_json(self.schema_hook(t.cls), schema)
                except NotImplementedError:
                    pass
            if not schema:
                raise TypeError(
                    "Generating JSON schema for custom types requires either:\n"
                    "- specifying a `schema_hook`\n"
                    "- annotating the type with `Meta(extra_json_schema=...)`\n"
                    "\n"
                    f"type {t.cls!r} is not supported"
                )
        else:
            # This should be unreachable
            raise TypeError(f"json-schema doesn't support type {t!r}")

        return schema
diff --git a/venv/lib/python3.11/site-packages/msgspec/_utils.py b/venv/lib/python3.11/site-packages/msgspec/_utils.py
new file mode 100644
index 0000000..ddf6f27
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/_utils.py
@@ -0,0 +1,289 @@
+# type: ignore
+import collections
+import sys
+import typing
+
+try:
+ from typing_extensions import _AnnotatedAlias
+except Exception:
+ try:
+ from typing import _AnnotatedAlias
+ except Exception:
+ _AnnotatedAlias = None
+
+try:
+ from typing_extensions import get_type_hints as _get_type_hints
+except Exception:
+ from typing import get_type_hints as _get_type_hints
+
+try:
+ from typing_extensions import NotRequired, Required
+except Exception:
+ try:
+ from typing import NotRequired, Required
+ except Exception:
+ Required = NotRequired = None
+
+
if Required is None and _AnnotatedAlias is None:
    # No extras available, so no `include_extras`
    get_type_hints = _get_type_hints
else:

    def get_type_hints(obj):
        # Keep Annotated metadata intact so callers can inspect it.
        return _get_type_hints(obj, include_extras=True)
+
+
# The `is_class` argument was new in 3.11, but was backported to 3.9 and 3.10.
# It's _likely_ to be available for 3.9/3.10, but may not be. Easiest way to
# check is to try it and see. This check can be removed when we drop support
# for Python 3.10.
try:
    typing.ForwardRef("Foo", is_class=True)
except TypeError:
    # Older runtime: `is_class` keyword unsupported.
    def _forward_ref(value):
        return typing.ForwardRef(value, is_argument=False)

else:

    def _forward_ref(value):
        return typing.ForwardRef(value, is_argument=False, is_class=True)
+
+
def _apply_params(obj, mapping):
    """Substitute TypeVars in ``obj`` using ``mapping`` (TypeVar -> type).

    Parametrizes generic aliases whose ``__parameters__`` appear in the
    mapping, swaps bare TypeVars directly, and leaves anything else unchanged.
    """
    type_params = getattr(obj, "__parameters__", None)
    if type_params:
        resolved = tuple(mapping.get(p, p) for p in type_params)
        return obj[resolved]
    if isinstance(obj, typing.TypeVar):
        return mapping.get(obj, obj)
    return obj
+
+
def _get_class_mro_and_typevar_mappings(obj):
    """Return ``(mro, mapping)`` for a class or parametrized generic alias.

    ``mro`` is the MRO of the underlying class; ``mapping`` maps each
    parametrized class in the hierarchy to a dict of its TypeVars -> the
    arguments it was parametrized with (resolved through enclosing scopes).
    """
    mapping = {}

    # Normalize: a parametrized alias keeps the real class in `__origin__`.
    if isinstance(obj, type):
        cls = obj
    else:
        cls = obj.__origin__

    def inner(c, scope):
        if isinstance(c, type):
            # Plain (unparametrized) class: nothing to record at this level.
            cls = c
            new_scope = {}
        else:
            cls = getattr(c, "__origin__", None)
            # Skip non-class bases, and classes already processed.
            if cls in (None, object, typing.Generic) or cls in mapping:
                return
            params = cls.__parameters__
            # Resolve this level's args through the enclosing scope first.
            args = tuple(_apply_params(a, scope) for a in c.__args__)
            assert len(params) == len(args)
            mapping[cls] = new_scope = dict(zip(params, args))

        if issubclass(cls, typing.Generic):
            # `__orig_bases__` preserves parametrized bases (e.g. Base[int]).
            bases = getattr(cls, "__orig_bases__", cls.__bases__)
            for b in bases:
                inner(b, new_scope)

    inner(obj, {})
    return cls.__mro__, mapping
+
+
def get_class_annotations(obj):
    """Get the annotations for a class.

    This is similar to ``typing.get_type_hints``, except:

    - We maintain it
    - It leaves extras like ``Annotated``/``ClassVar`` alone
    - It resolves any parametrized generics in the class mro. The returned
      mapping may still include ``TypeVar`` values, but those should be treated
      as their unparametrized variants (i.e. equal to ``Any`` for the common case).

    Note that this function doesn't check that Generic types are being used
    properly - invalid uses of `Generic` may slip through without complaint.

    The assumption here is that the user is making use of a static analysis
    tool like ``mypy``/``pyright`` already, which would catch misuse of these
    APIs.
    """
    hints = {}
    mro, typevar_mappings = _get_class_mro_and_typevar_mappings(obj)

    for cls in mro:
        if cls in (typing.Generic, object):
            continue

        mapping = typevar_mappings.get(cls)
        # Namespaces used to evaluate string/forward-ref annotations.
        cls_locals = dict(vars(cls))
        cls_globals = getattr(sys.modules.get(cls.__module__, None), "__dict__", {})

        ann = cls.__dict__.get("__annotations__", {})
        for name, value in ann.items():
            # First definition in the MRO wins (subclass overrides base).
            if name in hints:
                continue
            if value is None:
                value = type(None)
            elif isinstance(value, str):
                value = _forward_ref(value)
            value = typing._eval_type(value, cls_locals, cls_globals)
            if mapping is not None:
                # Substitute any TypeVars parametrized in this class.
                value = _apply_params(value, mapping)
            hints[name] = value
    return hints
+
+
# A mapping from a type annotation (or annotation __origin__) to the concrete
# python type that msgspec will use when decoding. THIS IS PRIVATE FOR A
# REASON. DON'T MUCK WITH THIS.
_CONCRETE_TYPES = {
    # Builtin containers map to themselves.
    list: list,
    tuple: tuple,
    set: set,
    frozenset: frozenset,
    dict: dict,
    # Legacy typing aliases.
    typing.List: list,
    typing.Tuple: tuple,
    typing.Set: set,
    typing.FrozenSet: frozenset,
    typing.Dict: dict,
    # Abstract typing protocols decode to a sensible concrete container.
    typing.Collection: list,
    typing.MutableSequence: list,
    typing.Sequence: list,
    typing.MutableMapping: dict,
    typing.Mapping: dict,
    typing.MutableSet: set,
    typing.AbstractSet: set,
    # collections.abc equivalents of the above.
    collections.abc.Collection: list,
    collections.abc.MutableSequence: list,
    collections.abc.Sequence: list,
    collections.abc.MutableSet: set,
    collections.abc.Set: set,
    collections.abc.MutableMapping: dict,
    collections.abc.Mapping: dict,
}
+
+
def get_typeddict_info(obj):
    """Return ``(hints, required)`` for a TypedDict class or alias: the
    resolved field annotations (with Required/NotRequired stripped) and the
    set of required key names."""
    if isinstance(obj, type):
        cls = obj
    else:
        # Parametrized alias: the TypedDict class is in `__origin__`.
        cls = obj.__origin__

    raw_hints = get_class_annotations(obj)

    if hasattr(cls, "__required_keys__"):
        required = set(cls.__required_keys__)
    elif cls.__total__:
        # Older TypedDicts without __required_keys__: total=True means all
        # keys are required.
        required = set(raw_hints)
    else:
        required = set()

    # Both `typing.TypedDict` and `typing_extensions.TypedDict` have a bug
    # where `Required`/`NotRequired` aren't properly detected at runtime when
    # `__future__.annotations` is enabled, meaning the `__required_keys__`
    # isn't correct. This code block works around this issue by amending the
    # set of required keys as needed, while also stripping off any
    # `Required`/`NotRequired` wrappers.
    hints = {}
    for k, v in raw_hints.items():
        origin = getattr(v, "__origin__", False)
        if origin is Required:
            required.add(k)
            hints[k] = v.__args__[0]
        elif origin is NotRequired:
            required.discard(k)
            hints[k] = v.__args__[0]
        else:
            hints[k] = v
    return hints, required
+
+
def get_dataclass_info(obj):
    """Extract field info from a dataclass or attrs class.

    Returns ``(cls, fields, defaults, pre_init, post_init)``, where ``fields``
    is a tuple of ``(name, type, is_default_factory)`` ordered with required
    fields first, and ``defaults`` aligns with the trailing optional fields.
    """
    if isinstance(obj, type):
        cls = obj
    else:
        # Parametrized alias: the real class is in `__origin__`.
        cls = obj.__origin__
    hints = get_class_annotations(obj)
    required = []
    optional = []
    defaults = []

    if hasattr(cls, "__dataclass_fields__"):
        # Standard-library dataclass.
        from dataclasses import _FIELD, _FIELD_INITVAR, MISSING

        for field in cls.__dataclass_fields__.values():
            # Skip pseudo-fields (ClassVar); InitVar is unsupported.
            if field._field_type is not _FIELD:
                if field._field_type is _FIELD_INITVAR:
                    raise TypeError(
                        "dataclasses with `InitVar` fields are not supported"
                    )
                continue
            name = field.name
            typ = hints[name]
            if field.default is not MISSING:
                defaults.append(field.default)
                optional.append((name, typ, False))
            elif field.default_factory is not MISSING:
                defaults.append(field.default_factory)
                optional.append((name, typ, True))
            else:
                required.append((name, typ, False))

        # Required fields first, then optional (matches `defaults` alignment).
        required.extend(optional)

        pre_init = None
        post_init = getattr(cls, "__post_init__", None)
    else:
        # attrs class.
        from attrs import NOTHING, Factory

        fields_with_validators = []

        for field in cls.__attrs_attrs__:
            name = field.name
            typ = hints[name]
            default = field.default
            if default is not NOTHING:
                if isinstance(default, Factory):
                    if default.takes_self:
                        raise NotImplementedError(
                            "Support for default factories with `takes_self=True` "
                            "is not implemented. File a GitHub issue if you need "
                            "this feature!"
                        )
                    defaults.append(default.factory)
                    optional.append((name, typ, True))
                else:
                    defaults.append(default)
                    optional.append((name, typ, False))
            else:
                required.append((name, typ, False))

            if field.validator is not None:
                fields_with_validators.append(field)

        required.extend(optional)

        pre_init = getattr(cls, "__attrs_pre_init__", None)
        post_init = getattr(cls, "__attrs_post_init__", None)

        if fields_with_validators:
            # Run attrs validators as part of the post-init hook.
            post_init = _wrap_attrs_validators(fields_with_validators, post_init)

    return cls, tuple(required), tuple(defaults), pre_init, post_init
+
+
def _wrap_attrs_validators(fields, post_init):
    """Build a post-init hook that runs each attrs field validator on the
    instance, then chains to ``post_init`` (if any)."""

    def _run(obj):
        for fld in fields:
            fld.validator(obj, fld, getattr(obj, fld.name))
        if post_init is not None:
            post_init(obj)

    return _run
+
+
def rebuild(cls, kwargs):
    """Pickle support: reconstruct a Struct with keyword-only fields by
    calling ``cls`` with the saved keyword arguments."""
    return cls(**kwargs)
diff --git a/venv/lib/python3.11/site-packages/msgspec/_version.py b/venv/lib/python3.11/site-packages/msgspec/_version.py
new file mode 100644
index 0000000..742e7b8
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/_version.py
@@ -0,0 +1,21 @@
+
+# This file was generated by 'versioneer.py' (0.19) from
+# revision-control system data, or from the parent directory name of an
+# unpacked source archive. Distribution tarballs contain a pre-generated copy
+# of this file.
+
+import json
+
+version_json = '''
+{
+ "date": "2024-01-21T21:57:36-0600",
+ "dirty": false,
+ "error": null,
+ "full-revisionid": "510d40160c5199fb562bc6f880e12f31cd697c6a",
+ "version": "0.18.6"
+}
+''' # END VERSION_JSON
+
+
+def get_versions():
+ return json.loads(version_json)
diff --git a/venv/lib/python3.11/site-packages/msgspec/inspect.py b/venv/lib/python3.11/site-packages/msgspec/inspect.py
new file mode 100644
index 0000000..2f5a804
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/inspect.py
@@ -0,0 +1,1005 @@
+from __future__ import annotations
+
+import datetime
+import decimal
+import enum
+import uuid
+from collections.abc import Iterable
+from typing import (
+ Any,
+ Final,
+ Literal,
+ Tuple,
+ Type as typing_Type,
+ TypeVar,
+ Union,
+)
+
+try:
+ from types import UnionType as _types_UnionType # type: ignore
+except Exception:
+ _types_UnionType = type("UnionType", (), {}) # type: ignore
+
+try:
+ from typing import TypeAliasType as _TypeAliasType # type: ignore
+except Exception:
+ _TypeAliasType = type("TypeAliasType", (), {}) # type: ignore
+
+import msgspec
+from msgspec import NODEFAULT, UNSET, UnsetType as _UnsetType
+
+from ._core import ( # type: ignore
+ Factory as _Factory,
+ to_builtins as _to_builtins,
+)
+from ._utils import ( # type: ignore
+ _CONCRETE_TYPES,
+ _AnnotatedAlias,
+ get_class_annotations as _get_class_annotations,
+ get_dataclass_info as _get_dataclass_info,
+ get_typeddict_info as _get_typeddict_info,
+)
+
+__all__ = (
+ "type_info",
+ "multi_type_info",
+ "Type",
+ "Metadata",
+ "AnyType",
+ "NoneType",
+ "BoolType",
+ "IntType",
+ "FloatType",
+ "StrType",
+ "BytesType",
+ "ByteArrayType",
+ "MemoryViewType",
+ "DateTimeType",
+ "TimeType",
+ "DateType",
+ "TimeDeltaType",
+ "UUIDType",
+ "DecimalType",
+ "ExtType",
+ "RawType",
+ "EnumType",
+ "LiteralType",
+ "CustomType",
+ "UnionType",
+ "CollectionType",
+ "ListType",
+ "SetType",
+ "FrozenSetType",
+ "VarTupleType",
+ "TupleType",
+ "DictType",
+ "Field",
+ "TypedDictType",
+ "NamedTupleType",
+ "DataclassType",
+ "StructType",
+)
+
+
+def __dir__():
+ return __all__
+
+
+class Type(msgspec.Struct):
+ """The base Type."""
+
+
+class Metadata(Type):
+ """A type wrapping a subtype with additional metadata.
+
+ Parameters
+ ----------
+ type: Type
+ The subtype.
+ extra_json_schema: dict, optional
+ A dict of extra fields to set for the subtype when generating a
+ json-schema.
+ extra: dict, optional
+ A dict of extra user-defined metadata attached to the subtype.
+ """
+
+ type: Type
+ extra_json_schema: Union[dict, None] = None
+ extra: Union[dict, None] = None
+
+
+class AnyType(Type):
+ """A type corresponding to `typing.Any`."""
+
+
+class NoneType(Type):
+ """A type corresponding to `None`."""
+
+
+class BoolType(Type):
+ """A type corresponding to `bool`."""
+
+
+class IntType(Type):
+ """A type corresponding to `int`.
+
+ Parameters
+ ----------
+ gt: int, optional
+ If set, an instance of this type must be greater than ``gt``.
+ ge: int, optional
+ If set, an instance of this type must be greater than or equal to ``ge``.
+ lt: int, optional
+ If set, an instance of this type must be less than ``lt``.
+ le: int, optional
+ If set, an instance of this type must be less than or equal to ``le``.
+ multiple_of: int, optional
+ If set, an instance of this type must be a multiple of ``multiple_of``.
+ """
+
+ gt: Union[int, None] = None
+ ge: Union[int, None] = None
+ lt: Union[int, None] = None
+ le: Union[int, None] = None
+ multiple_of: Union[int, None] = None
+
+
+class FloatType(Type):
+ """A type corresponding to `float`.
+
+ Parameters
+ ----------
+ gt: float, optional
+ If set, an instance of this type must be greater than ``gt``.
+ ge: float, optional
+ If set, an instance of this type must be greater than or equal to ``ge``.
+ lt: float, optional
+ If set, an instance of this type must be less than ``lt``.
+ le: float, optional
+ If set, an instance of this type must be less than or equal to ``le``.
+ multiple_of: float, optional
+ If set, an instance of this type must be a multiple of ``multiple_of``.
+ """
+
+ gt: Union[float, None] = None
+ ge: Union[float, None] = None
+ lt: Union[float, None] = None
+ le: Union[float, None] = None
+ multiple_of: Union[float, None] = None
+
+
+class StrType(Type):
+ """A type corresponding to `str`.
+
+ Parameters
+ ----------
+ min_length: int, optional
+ If set, an instance of this type must have length greater than or equal
+ to ``min_length``.
+ max_length: int, optional
+ If set, an instance of this type must have length less than or equal
+ to ``max_length``.
+ pattern: str, optional
+ If set, an instance of this type must match against this regex pattern.
+ Note that the pattern is treated as **unanchored**.
+ """
+
+ min_length: Union[int, None] = None
+ max_length: Union[int, None] = None
+ pattern: Union[str, None] = None
+
+
+class BytesType(Type):
+ """A type corresponding to `bytes`.
+
+ Parameters
+ ----------
+ min_length: int, optional
+ If set, an instance of this type must have length greater than or equal
+ to ``min_length``.
+ max_length: int, optional
+ If set, an instance of this type must have length less than or equal
+ to ``max_length``.
+ """
+
+ min_length: Union[int, None] = None
+ max_length: Union[int, None] = None
+
+
+class ByteArrayType(Type):
+ """A type corresponding to `bytearray`.
+
+ Parameters
+ ----------
+ min_length: int, optional
+ If set, an instance of this type must have length greater than or equal
+ to ``min_length``.
+ max_length: int, optional
+ If set, an instance of this type must have length less than or equal
+ to ``max_length``.
+ """
+
+ min_length: Union[int, None] = None
+ max_length: Union[int, None] = None
+
+
+class MemoryViewType(Type):
+ """A type corresponding to `memoryview`.
+
+ Parameters
+ ----------
+ min_length: int, optional
+ If set, an instance of this type must have length greater than or equal
+ to ``min_length``.
+ max_length: int, optional
+ If set, an instance of this type must have length less than or equal
+ to ``max_length``.
+ """
+
+ min_length: Union[int, None] = None
+ max_length: Union[int, None] = None
+
+
+class DateTimeType(Type):
+ """A type corresponding to `datetime.datetime`.
+
+ Parameters
+ ----------
+ tz: bool
+ The timezone-requirements for an instance of this type. ``True``
+ indicates a timezone-aware value is required, ``False`` indicates a
+ timezone-naive value is required. The default is ``None``, which
+ accepts either timezone-aware or timezone-naive values.
+ """
+
+ tz: Union[bool, None] = None
+
+
+class TimeType(Type):
+ """A type corresponding to `datetime.time`.
+
+ Parameters
+ ----------
+ tz: bool
+ The timezone-requirements for an instance of this type. ``True``
+ indicates a timezone-aware value is required, ``False`` indicates a
+ timezone-naive value is required. The default is ``None``, which
+ accepts either timezone-aware or timezone-naive values.
+ """
+
+ tz: Union[bool, None] = None
+
+
+class DateType(Type):
+ """A type corresponding to `datetime.date`."""
+
+
+class TimeDeltaType(Type):
+ """A type corresponding to `datetime.timedelta`."""
+
+
+class UUIDType(Type):
+ """A type corresponding to `uuid.UUID`."""
+
+
+class DecimalType(Type):
+ """A type corresponding to `decimal.Decimal`."""
+
+
+class ExtType(Type):
+ """A type corresponding to `msgspec.msgpack.Ext`."""
+
+
+class RawType(Type):
+ """A type corresponding to `msgspec.Raw`."""
+
+
+class EnumType(Type):
+ """A type corresponding to an `enum.Enum` type.
+
+ Parameters
+ ----------
+ cls: type
+ The corresponding `enum.Enum` type.
+ """
+
+ cls: typing_Type[enum.Enum]
+
+
+class LiteralType(Type):
+ """A type corresponding to a `typing.Literal` type.
+
+ Parameters
+ ----------
+ values: tuple
+ A tuple of possible values for this literal instance. Only `str` or
+ `int` literals are supported.
+ """
+
+ values: Union[Tuple[str, ...], Tuple[int, ...]]
+
+
+class CustomType(Type):
+ """A custom type.
+
+ Parameters
+ ----------
+ cls: type
+ The corresponding custom type.
+ """
+
+ cls: type
+
+
+class UnionType(Type):
+ """A union type.
+
+ Parameters
+ ----------
+ types: Tuple[Type, ...]
+ A tuple of possible types for this union.
+ """
+
+ types: Tuple[Type, ...]
+
+ @property
+ def includes_none(self) -> bool:
+ """A helper for checking whether ``None`` is included in this union."""
+ return any(isinstance(t, NoneType) for t in self.types)
+
+
+class CollectionType(Type):
+ """A collection type.
+
+ This is the base type shared by collection types like `ListType`,
+ `SetType`, etc.
+
+ Parameters
+ ----------
+ item_type: Type
+ The item type.
+ min_length: int, optional
+ If set, an instance of this type must have length greater than or equal
+ to ``min_length``.
+ max_length: int, optional
+ If set, an instance of this type must have length less than or equal
+ to ``max_length``.
+ """
+
+ item_type: Type
+ min_length: Union[int, None] = None
+ max_length: Union[int, None] = None
+
+
+class ListType(CollectionType):
+ """A type corresponding to a `list`.
+
+ Parameters
+ ----------
+ item_type: Type
+ The item type.
+ min_length: int, optional
+ If set, an instance of this type must have length greater than or equal
+ to ``min_length``.
+ max_length: int, optional
+ If set, an instance of this type must have length less than or equal
+ to ``max_length``.
+ """
+
+
+class VarTupleType(CollectionType):
+ """A type corresponding to a variadic `tuple`.
+
+ Parameters
+ ----------
+ item_type: Type
+ The item type.
+ min_length: int, optional
+ If set, an instance of this type must have length greater than or equal
+ to ``min_length``.
+ max_length: int, optional
+ If set, an instance of this type must have length less than or equal
+ to ``max_length``.
+ """
+
+
+class SetType(CollectionType):
+ """A type corresponding to a `set`.
+
+ Parameters
+ ----------
+ item_type: Type
+ The item type.
+ min_length: int, optional
+ If set, an instance of this type must have length greater than or equal
+ to ``min_length``.
+ max_length: int, optional
+ If set, an instance of this type must have length less than or equal
+ to ``max_length``.
+ """
+
+
+class FrozenSetType(CollectionType):
+ """A type corresponding to a `frozenset`.
+
+ Parameters
+ ----------
+ item_type: Type
+ The item type.
+ min_length: int, optional
+ If set, an instance of this type must have length greater than or equal
+ to ``min_length``.
+ max_length: int, optional
+ If set, an instance of this type must have length less than or equal
+ to ``max_length``.
+ """
+
+
+class TupleType(Type):
+ """A type corresponding to `tuple`.
+
+ Parameters
+ ----------
+ item_types: Tuple[Type, ...]
+ A tuple of types for each element in the tuple.
+ """
+
+ item_types: Tuple[Type, ...]
+
+
+class DictType(Type):
+ """A type corresponding to `dict`.
+
+ Parameters
+ ----------
+ key_type: Type
+ The key type.
+ value_type: Type
+ The value type.
+ min_length: int, optional
+ If set, an instance of this type must have length greater than or equal
+ to ``min_length``.
+ max_length: int, optional
+ If set, an instance of this type must have length less than or equal
+ to ``max_length``.
+ """
+
+ key_type: Type
+ value_type: Type
+ min_length: Union[int, None] = None
+ max_length: Union[int, None] = None
+
+
+class Field(msgspec.Struct):
+ """A record describing a field in an object-like type.
+
+ Parameters
+ ----------
+ name: str
+ The field name as seen by Python code (e.g. ``field_one``).
+ encode_name: str
+ The name used when encoding/decoding the field. This may differ if
+ the field is renamed (e.g. ``fieldOne``).
+ type: Type
+ The field type.
+ required: bool, optional
+ Whether the field is required. Note that `required` being False doesn't
+ necessarily mean that `default` or `default_factory` will be set -
+ optional fields may exist with no default value.
+ default: Any, optional
+ A default value for the field. Will be `NODEFAULT` if no default value
+ is set.
+ default_factory: Any, optional
+ A callable that creates a default value for the field. Will be
+ `NODEFAULT` if no ``default_factory`` is set.
+ """
+
+ name: str
+ encode_name: str
+ type: Type
+ required: bool = True
+ default: Any = msgspec.field(default_factory=lambda: NODEFAULT)
+ default_factory: Any = msgspec.field(default_factory=lambda: NODEFAULT)
+
+
+class TypedDictType(Type):
+ """A type corresponding to a `typing.TypedDict` type.
+
+ Parameters
+ ----------
+ cls: type
+ The corresponding TypedDict type.
+ fields: Tuple[Field, ...]
+ A tuple of fields in the TypedDict.
+ """
+
+ cls: type
+ fields: Tuple[Field, ...]
+
+
+class NamedTupleType(Type):
+ """A type corresponding to a `typing.NamedTuple` type.
+
+ Parameters
+ ----------
+ cls: type
+ The corresponding NamedTuple type.
+ fields: Tuple[Field, ...]
+ A tuple of fields in the NamedTuple.
+ """
+
+ cls: type
+ fields: Tuple[Field, ...]
+
+
+class DataclassType(Type):
+ """A type corresponding to a `dataclasses` or `attrs` type.
+
+ Parameters
+ ----------
+ cls: type
+ The corresponding dataclass type.
+ fields: Tuple[Field, ...]
+ A tuple of fields in the dataclass.
+ """
+
+ cls: type
+ fields: Tuple[Field, ...]
+
+
+class StructType(Type):
+ """A type corresponding to a `msgspec.Struct` type.
+
+ Parameters
+ ----------
+ cls: type
+ The corresponding Struct type.
+ fields: Tuple[Field, ...]
+ A tuple of fields in the Struct.
+ tag_field: str or None, optional
+ If set, the field name used for the tag in a tagged union.
+ tag: str, int, or None, optional
+ If set, the value used for the tag in a tagged union.
+ array_like: bool, optional
+ Whether the struct is encoded as an array rather than an object.
+ forbid_unknown_fields: bool, optional
+ If ``False`` (the default) unknown fields are ignored when decoding. If
+ ``True`` any unknown fields will result in an error.
+ """
+
+ cls: typing_Type[msgspec.Struct]
+ fields: Tuple[Field, ...]
+ tag_field: Union[str, None] = None
+ tag: Union[str, int, None] = None
+ array_like: bool = False
+ forbid_unknown_fields: bool = False
+
+
+def multi_type_info(types: Iterable[Any]) -> tuple[Type, ...]:
+ """Get information about multiple msgspec-compatible types.
+
+ Parameters
+ ----------
+ types: an iterable of types
+ The types to get info about.
+
+ Returns
+ -------
+ tuple[Type, ...]
+
+ Examples
+ --------
+ >>> msgspec.inspect.multi_type_info([int, float, list[str]]) # doctest: +NORMALIZE_WHITESPACE
+ (IntType(gt=None, ge=None, lt=None, le=None, multiple_of=None),
+ FloatType(gt=None, ge=None, lt=None, le=None, multiple_of=None),
+ ListType(item_type=StrType(min_length=None, max_length=None, pattern=None),
+ min_length=None, max_length=None))
+ """
+ return _Translator(types).run()
+
+
+def type_info(type: Any) -> Type:
+ """Get information about a msgspec-compatible type.
+
+ Note that if you need to inspect multiple types it's more efficient to call
+ `multi_type_info` once with a sequence of types than calling `type_info`
+ multiple times.
+
+ Parameters
+ ----------
+ type: type
+ The type to get info about.
+
+ Returns
+ -------
+ Type
+
+ Examples
+ --------
+ >>> msgspec.inspect.type_info(bool)
+ BoolType()
+
+ >>> msgspec.inspect.type_info(int)
+ IntType(gt=None, ge=None, lt=None, le=None, multiple_of=None)
+
+ >>> msgspec.inspect.type_info(list[int]) # doctest: +NORMALIZE_WHITESPACE
+ ListType(item_type=IntType(gt=None, ge=None, lt=None, le=None, multiple_of=None),
+ min_length=None, max_length=None)
+ """
+ return multi_type_info([type])[0]
+
+
+# Implementation details
+def _origin_args_metadata(t):
+ # Strip wrappers (Annotated, NewType, Final) until we hit a concrete type
+ metadata = []
+ while True:
+ try:
+ origin = _CONCRETE_TYPES.get(t)
+ except TypeError:
+ # t is not hashable
+ origin = None
+
+ if origin is not None:
+ args = None
+ break
+
+ origin = getattr(t, "__origin__", None)
+ if origin is not None:
+ if type(t) is _AnnotatedAlias:
+ metadata.extend(m for m in t.__metadata__ if type(m) is msgspec.Meta)
+ t = origin
+ elif origin == Final:
+ t = t.__args__[0]
+ elif type(origin) is _TypeAliasType:
+ t = origin.__value__[t.__args__]
+ else:
+ args = getattr(t, "__args__", None)
+ origin = _CONCRETE_TYPES.get(origin, origin)
+ break
+ else:
+ supertype = getattr(t, "__supertype__", None)
+ if supertype is not None:
+ t = supertype
+ elif type(t) is _TypeAliasType:
+ t = t.__value__
+ else:
+ origin = t
+ args = None
+ break
+
+ if type(origin) is _types_UnionType:
+ args = origin.__args__
+ origin = Union
+ return origin, args, tuple(metadata)
+
+
+def _is_struct(t):
+ return type(t) is type(msgspec.Struct)
+
+
+def _is_enum(t):
+ return type(t) is enum.EnumMeta
+
+
+def _is_dataclass(t):
+ return hasattr(t, "__dataclass_fields__")
+
+
+def _is_attrs(t):
+ return hasattr(t, "__attrs_attrs__")
+
+
+def _is_typeddict(t):
+ try:
+ return issubclass(t, dict) and hasattr(t, "__total__")
+ except TypeError:
+ return False
+
+
+def _is_namedtuple(t):
+ try:
+ return issubclass(t, tuple) and hasattr(t, "_fields")
+ except TypeError:
+ return False
+
+
+def _merge_json(a, b):
+ if b:
+ a = a.copy()
+ for key, b_val in b.items():
+ if key in a:
+ a_val = a[key]
+ if isinstance(a_val, dict) and isinstance(b_val, dict):
+ a[key] = _merge_json(a_val, b_val)
+ elif isinstance(a_val, (list, tuple)) and isinstance(
+ b_val, (list, tuple)
+ ):
+ a[key] = list(a_val) + list(b_val)
+ else:
+ a[key] = b_val
+ else:
+ a[key] = b_val
+ return a
+
+
+class _Translator:
+ def __init__(self, types):
+ self.types = tuple(types)
+ self.type_hints = {}
+ self.cache = {}
+
+ def _get_class_annotations(self, t):
+ """A cached version of `get_class_annotations`"""
+ try:
+ return self.type_hints[t]
+ except KeyError:
+ out = self.type_hints[t] = _get_class_annotations(t)
+ return out
+
+ def run(self):
+ # First construct a decoder to validate the types are valid
+ from ._core import MsgpackDecoder
+
+ MsgpackDecoder(Tuple[self.types])
+ return tuple(self.translate(t) for t in self.types)
+
+ def translate(self, typ):
+ t, args, metadata = _origin_args_metadata(typ)
+
+ # Extract and merge components of any `Meta` annotations
+ constrs = {}
+ extra_json_schema = {}
+ extra = {}
+ for meta in metadata:
+ for attr in (
+ "ge",
+ "gt",
+ "le",
+ "lt",
+ "multiple_of",
+ "pattern",
+ "min_length",
+ "max_length",
+ "tz",
+ ):
+ if (val := getattr(meta, attr)) is not None:
+ constrs[attr] = val
+ for attr in ("title", "description", "examples"):
+ if (val := getattr(meta, attr)) is not None:
+ extra_json_schema[attr] = val
+ if meta.extra_json_schema is not None:
+ extra_json_schema = _merge_json(
+ extra_json_schema,
+ _to_builtins(meta.extra_json_schema, str_keys=True),
+ )
+ if meta.extra is not None:
+ extra.update(meta.extra)
+
+ out = self._translate_inner(t, args, **constrs)
+ if extra_json_schema or extra:
+ # If extra metadata is present, wrap the output type in a Metadata
+ # wrapper object
+ return Metadata(
+ out, extra_json_schema=extra_json_schema or None, extra=extra or None
+ )
+ return out
+
+ def _translate_inner(
+ self,
+ t,
+ args,
+ ge=None,
+ gt=None,
+ le=None,
+ lt=None,
+ multiple_of=None,
+ pattern=None,
+ min_length=None,
+ max_length=None,
+ tz=None,
+ ):
+ if t is Any:
+ return AnyType()
+ elif isinstance(t, TypeVar):
+ if t.__bound__ is not None:
+ return self.translate(t.__bound__)
+ return AnyType()
+ elif t is None or t is type(None):
+ return NoneType()
+ elif t is bool:
+ return BoolType()
+ elif t is int:
+ return IntType(ge=ge, gt=gt, le=le, lt=lt, multiple_of=multiple_of)
+ elif t is float:
+ return FloatType(ge=ge, gt=gt, le=le, lt=lt, multiple_of=multiple_of)
+ elif t is str:
+ return StrType(
+ min_length=min_length, max_length=max_length, pattern=pattern
+ )
+ elif t is bytes:
+ return BytesType(min_length=min_length, max_length=max_length)
+ elif t is bytearray:
+ return ByteArrayType(min_length=min_length, max_length=max_length)
+ elif t is memoryview:
+ return MemoryViewType(min_length=min_length, max_length=max_length)
+ elif t is datetime.datetime:
+ return DateTimeType(tz=tz)
+ elif t is datetime.time:
+ return TimeType(tz=tz)
+ elif t is datetime.date:
+ return DateType()
+ elif t is datetime.timedelta:
+ return TimeDeltaType()
+ elif t is uuid.UUID:
+ return UUIDType()
+ elif t is decimal.Decimal:
+ return DecimalType()
+ elif t is msgspec.Raw:
+ return RawType()
+ elif t is msgspec.msgpack.Ext:
+ return ExtType()
+ elif t is list:
+ return ListType(
+ self.translate(args[0]) if args else AnyType(),
+ min_length=min_length,
+ max_length=max_length,
+ )
+ elif t is set:
+ return SetType(
+ self.translate(args[0]) if args else AnyType(),
+ min_length=min_length,
+ max_length=max_length,
+ )
+ elif t is frozenset:
+ return FrozenSetType(
+ self.translate(args[0]) if args else AnyType(),
+ min_length=min_length,
+ max_length=max_length,
+ )
+ elif t is tuple:
+ # Handle an annoying compatibility issue:
+ # - Tuple[()] has args == ((),)
+ # - tuple[()] has args == ()
+ if args == ((),):
+ args = ()
+ if args is None:
+ return VarTupleType(
+ AnyType(), min_length=min_length, max_length=max_length
+ )
+ elif len(args) == 2 and args[-1] is ...:
+ return VarTupleType(
+ self.translate(args[0]),
+ min_length=min_length,
+ max_length=max_length,
+ )
+ else:
+ return TupleType(tuple(self.translate(a) for a in args))
+ elif t is dict:
+ return DictType(
+ self.translate(args[0]) if args else AnyType(),
+ self.translate(args[1]) if args else AnyType(),
+ min_length=min_length,
+ max_length=max_length,
+ )
+ elif t is Union:
+ args = tuple(self.translate(a) for a in args if a is not _UnsetType)
+ return args[0] if len(args) == 1 else UnionType(args)
+ elif t is Literal:
+ return LiteralType(tuple(sorted(args)))
+ elif _is_enum(t):
+ return EnumType(t)
+ elif _is_struct(t):
+ cls = t[args] if args else t
+ if cls in self.cache:
+ return self.cache[cls]
+ config = t.__struct_config__
+ self.cache[cls] = out = StructType(
+ cls,
+ (),
+ tag_field=config.tag_field,
+ tag=config.tag,
+ array_like=config.array_like,
+ forbid_unknown_fields=config.forbid_unknown_fields,
+ )
+
+ hints = self._get_class_annotations(cls)
+ npos = len(t.__struct_fields__) - len(t.__struct_defaults__)
+ fields = []
+ for name, encode_name, default_obj in zip(
+ t.__struct_fields__,
+ t.__struct_encode_fields__,
+ (NODEFAULT,) * npos + t.__struct_defaults__,
+ ):
+ if default_obj is NODEFAULT:
+ required = True
+ default = default_factory = NODEFAULT
+ elif isinstance(default_obj, _Factory):
+ required = False
+ default = NODEFAULT
+ default_factory = default_obj.factory
+ else:
+ required = False
+ default = NODEFAULT if default_obj is UNSET else default_obj
+ default_factory = NODEFAULT
+
+ field = Field(
+ name=name,
+ encode_name=encode_name,
+ type=self.translate(hints[name]),
+ required=required,
+ default=default,
+ default_factory=default_factory,
+ )
+ fields.append(field)
+
+ out.fields = tuple(fields)
+ return out
+ elif _is_typeddict(t):
+ cls = t[args] if args else t
+ if cls in self.cache:
+ return self.cache[cls]
+ self.cache[cls] = out = TypedDictType(cls, ())
+ hints, required = _get_typeddict_info(cls)
+ out.fields = tuple(
+ Field(
+ name=name,
+ encode_name=name,
+ type=self.translate(field_type),
+ required=name in required,
+ )
+ for name, field_type in sorted(hints.items())
+ )
+ return out
+ elif _is_dataclass(t) or _is_attrs(t):
+ cls = t[args] if args else t
+ if cls in self.cache:
+ return self.cache[cls]
+ self.cache[cls] = out = DataclassType(cls, ())
+ _, info, defaults, _, _ = _get_dataclass_info(cls)
+ defaults = ((NODEFAULT,) * (len(info) - len(defaults))) + defaults
+ fields = []
+ for (name, typ, is_factory), default_obj in zip(info, defaults):
+ if default_obj is NODEFAULT:
+ required = True
+ default = default_factory = NODEFAULT
+ elif is_factory:
+ required = False
+ default = NODEFAULT
+ default_factory = default_obj
+ else:
+ required = False
+ default = NODEFAULT if default_obj is UNSET else default_obj
+ default_factory = NODEFAULT
+
+ fields.append(
+ Field(
+ name=name,
+ encode_name=name,
+ type=self.translate(typ),
+ required=required,
+ default=default,
+ default_factory=default_factory,
+ )
+ )
+ out.fields = tuple(fields)
+ return out
+ elif _is_namedtuple(t):
+ cls = t[args] if args else t
+ if cls in self.cache:
+ return self.cache[cls]
+ self.cache[cls] = out = NamedTupleType(cls, ())
+ hints = self._get_class_annotations(cls)
+ out.fields = tuple(
+ Field(
+ name=name,
+ encode_name=name,
+ type=self.translate(hints.get(name, Any)),
+ required=name not in t._field_defaults,
+ default=t._field_defaults.get(name, NODEFAULT),
+ )
+ for name in t._fields
+ )
+ return out
+ else:
+ return CustomType(t)
diff --git a/venv/lib/python3.11/site-packages/msgspec/json.py b/venv/lib/python3.11/site-packages/msgspec/json.py
new file mode 100644
index 0000000..ff2069a
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/json.py
@@ -0,0 +1,8 @@
+from ._core import (
+ JSONDecoder as Decoder,
+ JSONEncoder as Encoder,
+ json_decode as decode,
+ json_encode as encode,
+ json_format as format,
+)
+from ._json_schema import schema, schema_components
diff --git a/venv/lib/python3.11/site-packages/msgspec/json.pyi b/venv/lib/python3.11/site-packages/msgspec/json.pyi
new file mode 100644
index 0000000..3b8b7e5
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/json.pyi
@@ -0,0 +1,113 @@
+from collections.abc import Iterable
+from typing import (
+ Any,
+ Callable,
+ Dict,
+ Generic,
+ # Iterable is imported from collections.abc above
+ Literal,
+ Optional,
+ Tuple,
+ Type,
+ TypeVar,
+ Union,
+ overload,
+)
+
+T = TypeVar("T")
+
+enc_hook_sig = Optional[Callable[[Any], Any]]
+dec_hook_sig = Optional[Callable[[type, Any], Any]]
+float_hook_sig = Optional[Callable[[str], Any]]
+schema_hook_sig = Optional[Callable[[type], dict[str, Any]]]
+
+class Encoder:
+ enc_hook: enc_hook_sig
+ decimal_format: Literal["string", "number"]
+ uuid_format: Literal["canonical", "hex"]
+ order: Literal[None, "deterministic", "sorted"]
+
+ def __init__(
+ self,
+ *,
+ enc_hook: enc_hook_sig = None,
+ decimal_format: Literal["string", "number"] = "string",
+ uuid_format: Literal["canonical", "hex"] = "canonical",
+ order: Literal[None, "deterministic", "sorted"] = None,
+ ): ...
+ def encode(self, obj: Any) -> bytes: ...
+ def encode_lines(self, items: Iterable) -> bytes: ...
+ def encode_into(
+ self, obj: Any, buffer: bytearray, offset: Optional[int] = 0
+ ) -> None: ...
+
+class Decoder(Generic[T]):
+ type: Type[T]
+ strict: bool
+ dec_hook: dec_hook_sig
+ float_hook: float_hook_sig
+
+ @overload
+ def __init__(
+ self: Decoder[Any],
+ *,
+ strict: bool = True,
+ dec_hook: dec_hook_sig = None,
+ float_hook: float_hook_sig = None,
+ ) -> None: ...
+ @overload
+ def __init__(
+ self: Decoder[T],
+ type: Type[T] = ...,
+ *,
+ strict: bool = True,
+ dec_hook: dec_hook_sig = None,
+ float_hook: float_hook_sig = None,
+ ) -> None: ...
+ @overload
+ def __init__(
+ self: Decoder[Any],
+ type: Any = ...,
+ *,
+ strict: bool = True,
+ dec_hook: dec_hook_sig = None,
+ float_hook: float_hook_sig = None,
+ ) -> None: ...
+ def decode(self, data: Union[bytes, str]) -> T: ...
+ def decode_lines(self, data: Union[bytes, str]) -> list[T]: ...
+
+@overload
+def decode(
+ buf: Union[bytes, str],
+ *,
+ strict: bool = True,
+ dec_hook: dec_hook_sig = None,
+) -> Any: ...
+@overload
+def decode(
+ buf: Union[bytes, str],
+ *,
+ type: Type[T] = ...,
+ strict: bool = True,
+ dec_hook: dec_hook_sig = None,
+) -> T: ...
+@overload
+def decode(
+ buf: Union[bytes, str],
+ *,
+ type: Any = ...,
+ strict: bool = True,
+ dec_hook: dec_hook_sig = None,
+) -> Any: ...
+def encode(obj: Any, *, enc_hook: enc_hook_sig = None, order: Literal[None, "deterministic", "sorted"] = None) -> bytes: ...
+def schema(type: Any, *, schema_hook: schema_hook_sig = None) -> Dict[str, Any]: ...
+def schema_components(
+ types: Iterable[Any],
+ *,
+ schema_hook: schema_hook_sig = None,
+ ref_template: str = "#/$defs/{name}"
+) -> Tuple[Tuple[Dict[str, Any], ...], Dict[str, Any]]: ...
+@overload
+def format(buf: str, *, indent: int = 2) -> str: ...
+@overload
+def format(buf: bytes, *, indent: int = 2) -> bytes: ...
diff --git a/venv/lib/python3.11/site-packages/msgspec/msgpack.py b/venv/lib/python3.11/site-packages/msgspec/msgpack.py
new file mode 100644
index 0000000..64151a9
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/msgpack.py
@@ -0,0 +1,7 @@
+from ._core import (
+ Ext,
+ MsgpackDecoder as Decoder,
+ MsgpackEncoder as Encoder,
+ msgpack_decode as decode,
+ msgpack_encode as encode,
+)
diff --git a/venv/lib/python3.11/site-packages/msgspec/msgpack.pyi b/venv/lib/python3.11/site-packages/msgspec/msgpack.pyi
new file mode 100644
index 0000000..d1284ea
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/msgpack.pyi
@@ -0,0 +1,103 @@
+from typing import (
+ Any,
+ Callable,
+ Generic,
+ Literal,
+ Optional,
+ Type,
+ TypeVar,
+ Union,
+ overload,
+)
+
+T = TypeVar("T")
+
+enc_hook_sig = Optional[Callable[[Any], Any]]
+ext_hook_sig = Optional[Callable[[int, memoryview], Any]]
+dec_hook_sig = Optional[Callable[[type, Any], Any]]
+
+class Ext:
+ code: int
+ data: Union[bytes, bytearray, memoryview]
+ def __init__(
+ self, code: int, data: Union[bytes, bytearray, memoryview]
+ ) -> None: ...
+
+class Decoder(Generic[T]):
+ type: Type[T]
+ strict: bool
+ dec_hook: dec_hook_sig
+ ext_hook: ext_hook_sig
+ @overload
+ def __init__(
+ self: Decoder[Any],
+ *,
+ strict: bool = True,
+ dec_hook: dec_hook_sig = None,
+ ext_hook: ext_hook_sig = None,
+ ) -> None: ...
+ @overload
+ def __init__(
+ self: Decoder[T],
+ type: Type[T] = ...,
+ *,
+ strict: bool = True,
+ dec_hook: dec_hook_sig = None,
+ ext_hook: ext_hook_sig = None,
+ ) -> None: ...
+ @overload
+ def __init__(
+ self: Decoder[Any],
+ type: Any = ...,
+ *,
+ strict: bool = True,
+ dec_hook: dec_hook_sig = None,
+ ext_hook: ext_hook_sig = None,
+ ) -> None: ...
+ def decode(self, data: bytes) -> T: ...
+
+class Encoder:
+ enc_hook: enc_hook_sig
+ decimal_format: Literal["string", "number"]
+ uuid_format: Literal["canonical", "hex", "bytes"]
+ order: Literal[None, "deterministic", "sorted"]
+ def __init__(
+ self,
+ *,
+ enc_hook: enc_hook_sig = None,
+ decimal_format: Literal["string", "number"] = "string",
+ uuid_format: Literal["canonical", "hex", "bytes"] = "canonical",
+ order: Literal[None, "deterministic", "sorted"] = None,
+ ): ...
+ def encode(self, obj: Any) -> bytes: ...
+ def encode_into(
+ self, obj: Any, buffer: bytearray, offset: Optional[int] = 0
+ ) -> None: ...
+
+@overload
+def decode(
+ buf: bytes,
+ *,
+ strict: bool = True,
+ dec_hook: dec_hook_sig = None,
+ ext_hook: ext_hook_sig = None,
+) -> Any: ...
+@overload
+def decode(
+ buf: bytes,
+ *,
+ type: Type[T] = ...,
+ strict: bool = True,
+ dec_hook: dec_hook_sig = None,
+ ext_hook: ext_hook_sig = None,
+) -> T: ...
+@overload
+def decode(
+ buf: bytes,
+ *,
+ type: Any = ...,
+ strict: bool = True,
+ dec_hook: dec_hook_sig = None,
+ ext_hook: ext_hook_sig = None,
+) -> Any: ...
+def encode(obj: Any, *, enc_hook: enc_hook_sig = None, order: Literal[None, "deterministic", "sorted"] = None) -> bytes: ...
diff --git a/venv/lib/python3.11/site-packages/msgspec/py.typed b/venv/lib/python3.11/site-packages/msgspec/py.typed
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/py.typed
diff --git a/venv/lib/python3.11/site-packages/msgspec/structs.py b/venv/lib/python3.11/site-packages/msgspec/structs.py
new file mode 100644
index 0000000..76f2fdf
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/structs.py
@@ -0,0 +1,106 @@
+from __future__ import annotations
+
+from typing import Any
+
+from . import NODEFAULT, Struct, field
+from ._core import ( # noqa
+ Factory as _Factory,
+ StructConfig,
+ asdict,
+ astuple,
+ replace,
+ force_setattr,
+)
+from ._utils import get_class_annotations as _get_class_annotations
+
# Names that form the public API of ``msgspec.structs``.
__all__ = (
    "FieldInfo",
    "StructConfig",
    "asdict",
    "astuple",
    "fields",
    "force_setattr",
    "replace",
)


def __dir__():
    # Restrict ``dir(msgspec.structs)`` to the public API only.
    return __all__
+
+
class FieldInfo(Struct):
    """A record describing a field in a struct type.

    Parameters
    ----------
    name: str
        The field name as seen by Python code (e.g. ``field_one``).
    encode_name: str
        The name used when encoding/decoding the field. This may differ if
        the field is renamed (e.g. ``fieldOne``).
    type: Any
        The full field type annotation.
    default: Any, optional
        A default value for the field. Will be `NODEFAULT` if no default value
        is set.
    default_factory: Any, optional
        A callable that creates a default value for the field. Will be
        `NODEFAULT` if no ``default_factory`` is set.
    """

    name: str
    encode_name: str
    type: Any
    # NOTE(review): the defaults are wrapped in factories rather than written
    # as ``= NODEFAULT`` directly -- presumably so the NODEFAULT sentinel is
    # not interpreted specially by the Struct machinery; confirm before
    # changing.
    default: Any = field(default_factory=lambda: NODEFAULT)
    default_factory: Any = field(default_factory=lambda: NODEFAULT)

    @property
    def required(self) -> bool:
        """A helper for checking whether a field is required"""
        # A field is required iff it has neither a default value nor a
        # default factory.
        return self.default is NODEFAULT and self.default_factory is NODEFAULT
+
+
def fields(type_or_instance: Struct | type[Struct]) -> tuple[FieldInfo, ...]:
    """Get information about the fields in a Struct.

    Parameters
    ----------
    type_or_instance:
        A struct type or instance.

    Returns
    -------
    tuple[FieldInfo, ...]

    Raises
    ------
    TypeError
        If called with something other than a struct type or instance.
    """
    # Fixes vs. original: return annotation was ``tuple[FieldInfo]`` (a
    # 1-tuple) instead of the intended variadic ``tuple[FieldInfo, ...]``;
    # locals ``fields``/``field`` shadowed this function and the imported
    # ``field`` helper and have been renamed.
    if isinstance(type_or_instance, Struct):
        annotated_cls = cls = type(type_or_instance)
    else:
        annotated_cls = type_or_instance
        # Unwrap a parameterized alias (e.g. ``MyStruct[int]``) to the
        # underlying class; annotations are resolved against the alias.
        cls = getattr(type_or_instance, "__origin__", type_or_instance)
        if not (isinstance(cls, type) and issubclass(cls, Struct)):
            raise TypeError("Must be called with a struct type or instance")

    hints = _get_class_annotations(annotated_cls)
    # Fields without defaults come first in __struct_fields__; pad with
    # NODEFAULT so the zip lines defaults up with the trailing fields.
    npos = len(cls.__struct_fields__) - len(cls.__struct_defaults__)
    infos = []
    for name, encode_name, default_obj in zip(
        cls.__struct_fields__,
        cls.__struct_encode_fields__,
        (NODEFAULT,) * npos + cls.__struct_defaults__,
    ):
        default = default_factory = NODEFAULT
        if isinstance(default_obj, _Factory):
            default_factory = default_obj.factory
        elif default_obj is not NODEFAULT:
            default = default_obj

        infos.append(
            FieldInfo(
                name=name,
                encode_name=encode_name,
                type=hints[name],
                default=default,
                default_factory=default_factory,
            )
        )

    return tuple(infos)
diff --git a/venv/lib/python3.11/site-packages/msgspec/structs.pyi b/venv/lib/python3.11/site-packages/msgspec/structs.pyi
new file mode 100644
index 0000000..58432b2
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/structs.pyi
@@ -0,0 +1,37 @@
+from typing import Any, TypeVar, Union
+
+from . import NODEFAULT, Struct
+
# TypeVar bound to Struct so ``replace`` returns the same struct subtype it
# was given.
S = TypeVar("S", bound=Struct)

# Stubs for the C-implemented helpers re-exported from ``msgspec._core``.
def replace(struct: S, /, **changes: Any) -> S: ...
def asdict(struct: Struct) -> dict[str, Any]: ...
def astuple(struct: Struct) -> tuple[Any, ...]: ...
def force_setattr(struct: Struct, name: str, value: Any) -> None: ...
+
class StructConfig:
    # Stub describing a struct type's configuration flags; attribute names
    # mirror the corresponding ``Struct`` class options.
    frozen: bool
    eq: bool
    order: bool
    array_like: bool
    gc: bool
    repr_omit_defaults: bool
    omit_defaults: bool
    forbid_unknown_fields: bool
    weakref: bool
    dict: bool
    cache_hash: bool
    # Tag value/field used for tagged unions; None when untagged.
    tag: Union[str, int, None]
    tag_field: Union[str, None]
+
class FieldInfo(Struct):
    # Stub for the field-description record returned by ``fields()``.
    name: str
    encode_name: str
    type: Any
    # NODEFAULT sentinels mark "no default" / "no default factory".
    default: Any = NODEFAULT
    default_factory: Any = NODEFAULT

    @property
    def required(self) -> bool: ...
+
+def fields(type_or_instance: Struct | type[Struct]) -> tuple[FieldInfo]: ...
diff --git a/venv/lib/python3.11/site-packages/msgspec/toml.py b/venv/lib/python3.11/site-packages/msgspec/toml.py
new file mode 100644
index 0000000..f37228d
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/toml.py
@@ -0,0 +1,190 @@
+import datetime as _datetime
+from typing import Any, Callable, Optional, Type, TypeVar, Union, overload, Literal
+
+from . import (
+ DecodeError as _DecodeError,
+ convert as _convert,
+ to_builtins as _to_builtins,
+)
+
# Public API of ``msgspec.toml``.
__all__ = ("encode", "decode")


def __dir__():
    # Restrict ``dir(msgspec.toml)`` to the public API only.
    return __all__
+
+
+def _import_tomllib():
+ try:
+ import tomllib # type: ignore
+
+ return tomllib
+ except ImportError:
+ pass
+
+ try:
+ import tomli # type: ignore
+
+ return tomli
+ except ImportError:
+ raise ImportError(
+ "`msgspec.toml.decode` requires `tomli` be installed.\n\n"
+ "Please either `pip` or `conda` install it as follows:\n\n"
+ " $ python -m pip install tomli # using pip\n"
+ " $ conda install tomli # or using conda"
+ ) from None
+
+
+def _import_tomli_w():
+ try:
+ import tomli_w # type: ignore
+
+ return tomli_w
+ except ImportError:
+ raise ImportError(
+ "`msgspec.toml.encode` requires `tomli_w` be installed.\n\n"
+ "Please either `pip` or `conda` install it as follows:\n\n"
+ " $ python -m pip install tomli_w # using pip\n"
+ " $ conda install tomli_w # or using conda"
+ ) from None
+
+
def encode(
    obj: Any,
    *,
    enc_hook: Optional[Callable[[Any], Any]] = None,
    order: Literal[None, "deterministic", "sorted"] = None,
) -> bytes:
    """Serialize an object as TOML.

    Parameters
    ----------
    obj : Any
        The object to serialize.
    enc_hook : callable, optional
        A callable invoked for objects that aren't supported msgspec types.
        It receives the unsupported object and must return a supported one,
        or raise ``NotImplementedError``.
    order : {None, 'deterministic', 'sorted'}, optional
        Ordering applied to unordered compound types while encoding:

        - ``None`` (default): encode in the most efficient in-memory order.
        - ``'deterministic'``: sort unordered collections (sets, dicts) for
          run-to-run stable output, useful when the encoded bytes are
          compared or hashed.
        - ``'sorted'``: like ``'deterministic'``, but object-like types
          (structs, dataclasses, ...) are also sorted by field name. Slower,
          but may be more human-readable.

    Returns
    -------
    data : bytes
        The serialized object.

    See Also
    --------
    decode
    """
    writer = _import_tomli_w()
    # Lower to builtin types first; TOML's native datetime/date/time are
    # passed through unchanged for tomli_w to render.
    payload = _to_builtins(
        obj,
        builtin_types=(_datetime.datetime, _datetime.date, _datetime.time),
        str_keys=True,
        enc_hook=enc_hook,
        order=order,
    )
    return writer.dumps(payload).encode("utf-8")
+
+
+T = TypeVar("T")
+
+
# ``decode`` overloads: the static return type tracks the ``type`` argument --
# ``Any`` when omitted, ``T`` for a concrete ``Type[T]``, ``Any`` for dynamic
# annotation forms.
@overload
def decode(
    buf: Union[bytes, str],
    *,
    strict: bool = True,
    dec_hook: Optional[Callable[[type, Any], Any]] = None,
) -> Any:
    pass


@overload
def decode(
    buf: Union[bytes, str],
    *,
    type: Type[T] = ...,
    strict: bool = True,
    dec_hook: Optional[Callable[[type, Any], Any]] = None,
) -> T:
    pass


@overload
def decode(
    buf: Union[bytes, str],
    *,
    type: Any = ...,
    strict: bool = True,
    dec_hook: Optional[Callable[[type, Any], Any]] = None,
) -> Any:
    pass
+
+
def decode(buf, *, type=Any, strict=True, dec_hook=None):
    """Deserialize an object from TOML.

    Parameters
    ----------
    buf : bytes-like or str
        The message to decode.
    type : type, optional
        A Python type (in type annotation form) to decode the object as.
        When given, the message is type checked and decoded as that type;
        the default ``Any`` returns the plain TOML types.
    strict : bool, optional
        Whether type coercion rules should be strict. ``False`` enables a
        wider set of string-to-non-string coercions. Default is ``True``.
    dec_hook : callable, optional
        Callback for decoding custom types, with signature
        ``dec_hook(type: Type, obj: Any) -> Any`` where ``type`` is the
        expected type and ``obj`` the decoded value built from basic TOML
        types. It must return an instance of ``type`` or raise
        ``NotImplementedError``.

    Returns
    -------
    obj : Any
        The deserialized object.

    See Also
    --------
    encode
    """
    parser = _import_tomllib()
    if isinstance(buf, (bytes, bytearray)):
        text = buf.decode("utf-8")
    elif isinstance(buf, str):
        text = buf
    else:
        # Normalize exotic bytes-like objects through memoryview first,
        # since `bytes(1)` is actually valid and would silently succeed.
        text = bytes(memoryview(buf)).decode("utf-8")
    try:
        data = parser.loads(text)
    except parser.TOMLDecodeError as exc:
        raise _DecodeError(str(exc)) from None

    if type is Any:
        return data
    return _convert(
        data,
        type,
        builtin_types=(_datetime.datetime, _datetime.date, _datetime.time),
        str_keys=True,
        strict=strict,
        dec_hook=dec_hook,
    )
diff --git a/venv/lib/python3.11/site-packages/msgspec/yaml.py b/venv/lib/python3.11/site-packages/msgspec/yaml.py
new file mode 100644
index 0000000..bb57ef9
--- /dev/null
+++ b/venv/lib/python3.11/site-packages/msgspec/yaml.py
@@ -0,0 +1,185 @@
+import datetime as _datetime
+from typing import Any, Callable, Optional, Type, TypeVar, Union, overload, Literal
+
+from . import (
+ DecodeError as _DecodeError,
+ convert as _convert,
+ to_builtins as _to_builtins,
+)
+
# Public API of ``msgspec.yaml``.
__all__ = ("encode", "decode")


def __dir__():
    # Restrict ``dir(msgspec.yaml)`` to the public API only.
    return __all__
+
+
+def _import_pyyaml(name):
+ try:
+ import yaml # type: ignore
+ except ImportError:
+ raise ImportError(
+ f"`msgspec.yaml.{name}` requires PyYAML be installed.\n\n"
+ "Please either `pip` or `conda` install it as follows:\n\n"
+ " $ python -m pip install pyyaml # using pip\n"
+ " $ conda install pyyaml # or using conda"
+ ) from None
+ else:
+ return yaml
+
+
def encode(
    obj: Any,
    *,
    enc_hook: Optional[Callable[[Any], Any]] = None,
    order: Literal[None, "deterministic", "sorted"] = None,
) -> bytes:
    """Serialize an object as YAML.

    Parameters
    ----------
    obj : Any
        The object to serialize.
    enc_hook : callable, optional
        A callable invoked for objects that aren't supported msgspec types.
        It receives the unsupported object and must return a supported one,
        or raise ``NotImplementedError``.
    order : {None, 'deterministic', 'sorted'}, optional
        Ordering applied to unordered compound types while encoding:

        - ``None`` (default): encode in the most efficient in-memory order.
        - ``'deterministic'``: sort unordered collections (sets, dicts) for
          run-to-run stable output, useful when the encoded bytes are
          compared or hashed.
        - ``'sorted'``: like ``'deterministic'``, but object-like types
          (structs, dataclasses, ...) are also sorted by field name. Slower,
          but may be more human-readable.

    Returns
    -------
    data : bytes
        The serialized object.

    Notes
    -----
    Requires the third-party `PyYAML library <https://pyyaml.org/>`_.

    See Also
    --------
    decode
    """
    yaml = _import_pyyaml("encode")
    # Prefer the libyaml-backed dumper when the C extension is available.
    dumper = getattr(yaml, "CSafeDumper", yaml.SafeDumper)
    payload = _to_builtins(
        obj,
        builtin_types=(_datetime.datetime, _datetime.date),
        enc_hook=enc_hook,
        order=order,
    )
    return yaml.dump_all(
        [payload],
        encoding="utf-8",
        Dumper=dumper,
        allow_unicode=True,
        sort_keys=False,
    )
+
+
+T = TypeVar("T")
+
+
# ``decode`` overloads: the static return type tracks the ``type`` argument --
# ``Any`` when omitted, ``T`` for a concrete ``Type[T]``, ``Any`` for dynamic
# annotation forms.
@overload
def decode(
    buf: Union[bytes, str],
    *,
    strict: bool = True,
    dec_hook: Optional[Callable[[type, Any], Any]] = None,
) -> Any:
    pass


@overload
def decode(
    buf: Union[bytes, str],
    *,
    type: Type[T] = ...,
    strict: bool = True,
    dec_hook: Optional[Callable[[type, Any], Any]] = None,
) -> T:
    pass


@overload
def decode(
    buf: Union[bytes, str],
    *,
    type: Any = ...,
    strict: bool = True,
    dec_hook: Optional[Callable[[type, Any], Any]] = None,
) -> Any:
    pass
+
+
def decode(buf, *, type=Any, strict=True, dec_hook=None):
    """Deserialize an object from YAML.

    Parameters
    ----------
    buf : bytes-like or str
        The message to decode.
    type : type, optional
        A Python type (in type annotation form) to decode the object as.
        When given, the message is type checked and decoded as that type;
        the default ``Any`` returns the plain YAML types.
    strict : bool, optional
        Whether type coercion rules should be strict. ``False`` enables a
        wider set of string-to-non-string coercions. Default is ``True``.
    dec_hook : callable, optional
        Callback for decoding custom types, with signature
        ``dec_hook(type: Type, obj: Any) -> Any`` where ``type`` is the
        expected type and ``obj`` the decoded value built from basic YAML
        types. It must return an instance of ``type`` or raise
        ``NotImplementedError``.

    Returns
    -------
    obj : Any
        The deserialized object.

    Notes
    -----
    Requires the third-party `PyYAML library <https://pyyaml.org/>`_.

    See Also
    --------
    encode
    """
    yaml = _import_pyyaml("decode")
    # Prefer the libyaml-backed loader when the C extension is available.
    loader = getattr(yaml, "CSafeLoader", yaml.SafeLoader)
    data = buf
    if not isinstance(data, (str, bytes)):
        # Normalize exotic bytes-like objects through memoryview first,
        # since `bytes(1)` is actually valid and would silently succeed.
        data = bytes(memoryview(data))
    try:
        obj = yaml.load(data, loader)
    except yaml.YAMLError as exc:
        raise _DecodeError(str(exc)) from None

    if type is Any:
        return obj
    return _convert(
        obj,
        type,
        builtin_types=(_datetime.datetime, _datetime.date),
        strict=strict,
        dec_hook=dec_hook,
    )