summaryrefslogtreecommitdiff
path: root/venv/lib/python3.11/site-packages/websockets
diff options
context:
space:
mode:
authorcyfraeviolae <cyfraeviolae>2024-04-03 03:17:55 -0400
committercyfraeviolae <cyfraeviolae>2024-04-03 03:17:55 -0400
commit12cf076118570eebbff08c6b3090e0d4798447a1 (patch)
tree3ba25e17e3c3a5e82316558ba3864b955919ff72 /venv/lib/python3.11/site-packages/websockets
parentc45662ff3923b34614ddcc8feb9195541166dcc5 (diff)
no venv
Diffstat (limited to 'venv/lib/python3.11/site-packages/websockets')
-rw-r--r--venv/lib/python3.11/site-packages/websockets/__init__.py190
-rw-r--r--venv/lib/python3.11/site-packages/websockets/__main__.py159
-rw-r--r--venv/lib/python3.11/site-packages/websockets/__pycache__/__init__.cpython-311.pycbin4103 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/__pycache__/__main__.cpython-311.pycbin6839 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/__pycache__/auth.cpython-311.pycbin341 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/__pycache__/client.cpython-311.pycbin16574 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/__pycache__/connection.cpython-311.pycbin621 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/__pycache__/datastructures.cpython-311.pycbin10164 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/__pycache__/exceptions.cpython-311.pycbin17210 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/__pycache__/frames.cpython-311.pycbin18126 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/__pycache__/headers.cpython-311.pycbin20147 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/__pycache__/http.cpython-311.pycbin994 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/__pycache__/http11.cpython-311.pycbin13972 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/__pycache__/imports.cpython-311.pycbin3903 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/__pycache__/protocol.cpython-311.pycbin24458 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/__pycache__/server.cpython-311.pycbin24729 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/__pycache__/streams.cpython-311.pycbin5744 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/__pycache__/typing.cpython-311.pycbin1255 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/__pycache__/uri.cpython-311.pycbin4483 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/__pycache__/utils.cpython-311.pycbin2472 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/__pycache__/version.cpython-311.pycbin2632 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/auth.py6
-rw-r--r--venv/lib/python3.11/site-packages/websockets/client.py360
-rw-r--r--venv/lib/python3.11/site-packages/websockets/connection.py13
-rw-r--r--venv/lib/python3.11/site-packages/websockets/datastructures.py194
-rw-r--r--venv/lib/python3.11/site-packages/websockets/exceptions.py405
-rw-r--r--venv/lib/python3.11/site-packages/websockets/extensions/__init__.py4
-rw-r--r--venv/lib/python3.11/site-packages/websockets/extensions/__pycache__/__init__.cpython-311.pycbin324 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/extensions/__pycache__/base.cpython-311.pycbin4689 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/extensions/__pycache__/permessage_deflate.cpython-311.pycbin19873 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/extensions/base.py133
-rw-r--r--venv/lib/python3.11/site-packages/websockets/extensions/permessage_deflate.py660
-rw-r--r--venv/lib/python3.11/site-packages/websockets/frames.py470
-rw-r--r--venv/lib/python3.11/site-packages/websockets/headers.py587
-rw-r--r--venv/lib/python3.11/site-packages/websockets/http.py35
-rw-r--r--venv/lib/python3.11/site-packages/websockets/http11.py364
-rw-r--r--venv/lib/python3.11/site-packages/websockets/imports.py99
-rw-r--r--venv/lib/python3.11/site-packages/websockets/legacy/__init__.py0
-rw-r--r--venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/__init__.cpython-311.pycbin201 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/async_timeout.cpython-311.pycbin10608 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/auth.cpython-311.pycbin8251 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/client.cpython-311.pycbin28561 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/compatibility.cpython-311.pycbin544 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/framing.cpython-311.pycbin6960 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/handshake.cpython-311.pycbin7863 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/http.cpython-311.pycbin8261 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/protocol.cpython-311.pycbin65525 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/server.cpython-311.pycbin49251 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/legacy/async_timeout.py265
-rw-r--r--venv/lib/python3.11/site-packages/websockets/legacy/auth.py184
-rw-r--r--venv/lib/python3.11/site-packages/websockets/legacy/client.py705
-rw-r--r--venv/lib/python3.11/site-packages/websockets/legacy/compatibility.py12
-rw-r--r--venv/lib/python3.11/site-packages/websockets/legacy/framing.py176
-rw-r--r--venv/lib/python3.11/site-packages/websockets/legacy/handshake.py165
-rw-r--r--venv/lib/python3.11/site-packages/websockets/legacy/http.py201
-rw-r--r--venv/lib/python3.11/site-packages/websockets/legacy/protocol.py1645
-rw-r--r--venv/lib/python3.11/site-packages/websockets/legacy/server.py1185
-rw-r--r--venv/lib/python3.11/site-packages/websockets/protocol.py708
-rw-r--r--venv/lib/python3.11/site-packages/websockets/py.typed0
-rw-r--r--venv/lib/python3.11/site-packages/websockets/server.py580
-rw-r--r--venv/lib/python3.11/site-packages/websockets/speedups.c223
-rwxr-xr-xvenv/lib/python3.11/site-packages/websockets/speedups.cpython-311-x86_64-linux-gnu.sobin35480 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/streams.py151
-rw-r--r--venv/lib/python3.11/site-packages/websockets/sync/__init__.py0
-rw-r--r--venv/lib/python3.11/site-packages/websockets/sync/__pycache__/__init__.cpython-311.pycbin199 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/sync/__pycache__/client.cpython-311.pycbin13185 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/sync/__pycache__/connection.cpython-311.pycbin31271 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/sync/__pycache__/messages.cpython-311.pycbin10271 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/sync/__pycache__/server.cpython-311.pycbin21370 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/sync/__pycache__/utils.cpython-311.pycbin1898 -> 0 bytes
-rw-r--r--venv/lib/python3.11/site-packages/websockets/sync/client.py328
-rw-r--r--venv/lib/python3.11/site-packages/websockets/sync/connection.py773
-rw-r--r--venv/lib/python3.11/site-packages/websockets/sync/messages.py281
-rw-r--r--venv/lib/python3.11/site-packages/websockets/sync/server.py530
-rw-r--r--venv/lib/python3.11/site-packages/websockets/sync/utils.py46
-rw-r--r--venv/lib/python3.11/site-packages/websockets/typing.py67
-rw-r--r--venv/lib/python3.11/site-packages/websockets/uri.py108
-rw-r--r--venv/lib/python3.11/site-packages/websockets/utils.py51
-rw-r--r--venv/lib/python3.11/site-packages/websockets/version.py82
79 files changed, 0 insertions, 12145 deletions
diff --git a/venv/lib/python3.11/site-packages/websockets/__init__.py b/venv/lib/python3.11/site-packages/websockets/__init__.py
deleted file mode 100644
index fdb028f..0000000
--- a/venv/lib/python3.11/site-packages/websockets/__init__.py
+++ /dev/null
@@ -1,190 +0,0 @@
-from __future__ import annotations
-
-import typing
-
-from .imports import lazy_import
-from .version import version as __version__ # noqa: F401
-
-
-__all__ = [
- # .client
- "ClientProtocol",
- # .datastructures
- "Headers",
- "HeadersLike",
- "MultipleValuesError",
- # .exceptions
- "AbortHandshake",
- "ConnectionClosed",
- "ConnectionClosedError",
- "ConnectionClosedOK",
- "DuplicateParameter",
- "InvalidHandshake",
- "InvalidHeader",
- "InvalidHeaderFormat",
- "InvalidHeaderValue",
- "InvalidMessage",
- "InvalidOrigin",
- "InvalidParameterName",
- "InvalidParameterValue",
- "InvalidState",
- "InvalidStatus",
- "InvalidStatusCode",
- "InvalidUpgrade",
- "InvalidURI",
- "NegotiationError",
- "PayloadTooBig",
- "ProtocolError",
- "RedirectHandshake",
- "SecurityError",
- "WebSocketException",
- "WebSocketProtocolError",
- # .legacy.auth
- "BasicAuthWebSocketServerProtocol",
- "basic_auth_protocol_factory",
- # .legacy.client
- "WebSocketClientProtocol",
- "connect",
- "unix_connect",
- # .legacy.protocol
- "WebSocketCommonProtocol",
- "broadcast",
- # .legacy.server
- "WebSocketServer",
- "WebSocketServerProtocol",
- "serve",
- "unix_serve",
- # .server
- "ServerProtocol",
- # .typing
- "Data",
- "ExtensionName",
- "ExtensionParameter",
- "LoggerLike",
- "StatusLike",
- "Origin",
- "Subprotocol",
-]
-
-# When type checking, import non-deprecated aliases eagerly. Else, import on demand.
-if typing.TYPE_CHECKING:
- from .client import ClientProtocol
- from .datastructures import Headers, HeadersLike, MultipleValuesError
- from .exceptions import (
- AbortHandshake,
- ConnectionClosed,
- ConnectionClosedError,
- ConnectionClosedOK,
- DuplicateParameter,
- InvalidHandshake,
- InvalidHeader,
- InvalidHeaderFormat,
- InvalidHeaderValue,
- InvalidMessage,
- InvalidOrigin,
- InvalidParameterName,
- InvalidParameterValue,
- InvalidState,
- InvalidStatus,
- InvalidStatusCode,
- InvalidUpgrade,
- InvalidURI,
- NegotiationError,
- PayloadTooBig,
- ProtocolError,
- RedirectHandshake,
- SecurityError,
- WebSocketException,
- WebSocketProtocolError,
- )
- from .legacy.auth import (
- BasicAuthWebSocketServerProtocol,
- basic_auth_protocol_factory,
- )
- from .legacy.client import WebSocketClientProtocol, connect, unix_connect
- from .legacy.protocol import WebSocketCommonProtocol, broadcast
- from .legacy.server import (
- WebSocketServer,
- WebSocketServerProtocol,
- serve,
- unix_serve,
- )
- from .server import ServerProtocol
- from .typing import (
- Data,
- ExtensionName,
- ExtensionParameter,
- LoggerLike,
- Origin,
- StatusLike,
- Subprotocol,
- )
-else:
- lazy_import(
- globals(),
- aliases={
- # .client
- "ClientProtocol": ".client",
- # .datastructures
- "Headers": ".datastructures",
- "HeadersLike": ".datastructures",
- "MultipleValuesError": ".datastructures",
- # .exceptions
- "AbortHandshake": ".exceptions",
- "ConnectionClosed": ".exceptions",
- "ConnectionClosedError": ".exceptions",
- "ConnectionClosedOK": ".exceptions",
- "DuplicateParameter": ".exceptions",
- "InvalidHandshake": ".exceptions",
- "InvalidHeader": ".exceptions",
- "InvalidHeaderFormat": ".exceptions",
- "InvalidHeaderValue": ".exceptions",
- "InvalidMessage": ".exceptions",
- "InvalidOrigin": ".exceptions",
- "InvalidParameterName": ".exceptions",
- "InvalidParameterValue": ".exceptions",
- "InvalidState": ".exceptions",
- "InvalidStatus": ".exceptions",
- "InvalidStatusCode": ".exceptions",
- "InvalidUpgrade": ".exceptions",
- "InvalidURI": ".exceptions",
- "NegotiationError": ".exceptions",
- "PayloadTooBig": ".exceptions",
- "ProtocolError": ".exceptions",
- "RedirectHandshake": ".exceptions",
- "SecurityError": ".exceptions",
- "WebSocketException": ".exceptions",
- "WebSocketProtocolError": ".exceptions",
- # .legacy.auth
- "BasicAuthWebSocketServerProtocol": ".legacy.auth",
- "basic_auth_protocol_factory": ".legacy.auth",
- # .legacy.client
- "WebSocketClientProtocol": ".legacy.client",
- "connect": ".legacy.client",
- "unix_connect": ".legacy.client",
- # .legacy.protocol
- "WebSocketCommonProtocol": ".legacy.protocol",
- "broadcast": ".legacy.protocol",
- # .legacy.server
- "WebSocketServer": ".legacy.server",
- "WebSocketServerProtocol": ".legacy.server",
- "serve": ".legacy.server",
- "unix_serve": ".legacy.server",
- # .server
- "ServerProtocol": ".server",
- # .typing
- "Data": ".typing",
- "ExtensionName": ".typing",
- "ExtensionParameter": ".typing",
- "LoggerLike": ".typing",
- "Origin": ".typing",
- "StatusLike": "typing",
- "Subprotocol": ".typing",
- },
- deprecated_aliases={
- "framing": ".legacy",
- "handshake": ".legacy",
- "parse_uri": ".uri",
- "WebSocketURI": ".uri",
- },
- )
diff --git a/venv/lib/python3.11/site-packages/websockets/__main__.py b/venv/lib/python3.11/site-packages/websockets/__main__.py
deleted file mode 100644
index f2ea5cf..0000000
--- a/venv/lib/python3.11/site-packages/websockets/__main__.py
+++ /dev/null
@@ -1,159 +0,0 @@
-from __future__ import annotations
-
-import argparse
-import os
-import signal
-import sys
-import threading
-
-
-try:
- import readline # noqa: F401
-except ImportError: # Windows has no `readline` normally
- pass
-
-from .sync.client import ClientConnection, connect
-from .version import version as websockets_version
-
-
-if sys.platform == "win32":
-
- def win_enable_vt100() -> None:
- """
- Enable VT-100 for console output on Windows.
-
- See also https://bugs.python.org/issue29059.
-
- """
- import ctypes
-
- STD_OUTPUT_HANDLE = ctypes.c_uint(-11)
- INVALID_HANDLE_VALUE = ctypes.c_uint(-1)
- ENABLE_VIRTUAL_TERMINAL_PROCESSING = 0x004
-
- handle = ctypes.windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
- if handle == INVALID_HANDLE_VALUE:
- raise RuntimeError("unable to obtain stdout handle")
-
- cur_mode = ctypes.c_uint()
- if ctypes.windll.kernel32.GetConsoleMode(handle, ctypes.byref(cur_mode)) == 0:
- raise RuntimeError("unable to query current console mode")
-
- # ctypes ints lack support for the required bit-OR operation.
- # Temporarily convert to Py int, do the OR and convert back.
- py_int_mode = int.from_bytes(cur_mode, sys.byteorder)
- new_mode = ctypes.c_uint(py_int_mode | ENABLE_VIRTUAL_TERMINAL_PROCESSING)
-
- if ctypes.windll.kernel32.SetConsoleMode(handle, new_mode) == 0:
- raise RuntimeError("unable to set console mode")
-
-
-def print_during_input(string: str) -> None:
- sys.stdout.write(
- # Save cursor position
- "\N{ESC}7"
- # Add a new line
- "\N{LINE FEED}"
- # Move cursor up
- "\N{ESC}[A"
- # Insert blank line, scroll last line down
- "\N{ESC}[L"
- # Print string in the inserted blank line
- f"{string}\N{LINE FEED}"
- # Restore cursor position
- "\N{ESC}8"
- # Move cursor down
- "\N{ESC}[B"
- )
- sys.stdout.flush()
-
-
-def print_over_input(string: str) -> None:
- sys.stdout.write(
- # Move cursor to beginning of line
- "\N{CARRIAGE RETURN}"
- # Delete current line
- "\N{ESC}[K"
- # Print string
- f"{string}\N{LINE FEED}"
- )
- sys.stdout.flush()
-
-
-def print_incoming_messages(websocket: ClientConnection, stop: threading.Event) -> None:
- for message in websocket:
- if isinstance(message, str):
- print_during_input("< " + message)
- else:
- print_during_input("< (binary) " + message.hex())
- if not stop.is_set():
- # When the server closes the connection, raise KeyboardInterrupt
- # in the main thread to exit the program.
- if sys.platform == "win32":
- ctrl_c = signal.CTRL_C_EVENT
- else:
- ctrl_c = signal.SIGINT
- os.kill(os.getpid(), ctrl_c)
-
-
-def main() -> None:
- # Parse command line arguments.
- parser = argparse.ArgumentParser(
- prog="python -m websockets",
- description="Interactive WebSocket client.",
- add_help=False,
- )
- group = parser.add_mutually_exclusive_group()
- group.add_argument("--version", action="store_true")
- group.add_argument("uri", metavar="<uri>", nargs="?")
- args = parser.parse_args()
-
- if args.version:
- print(f"websockets {websockets_version}")
- return
-
- if args.uri is None:
- parser.error("the following arguments are required: <uri>")
-
- # If we're on Windows, enable VT100 terminal support.
- if sys.platform == "win32":
- try:
- win_enable_vt100()
- except RuntimeError as exc:
- sys.stderr.write(
- f"Unable to set terminal to VT100 mode. This is only "
- f"supported since Win10 anniversary update. Expect "
- f"weird symbols on the terminal.\nError: {exc}\n"
- )
- sys.stderr.flush()
-
- try:
- websocket = connect(args.uri)
- except Exception as exc:
- print(f"Failed to connect to {args.uri}: {exc}.")
- sys.exit(1)
- else:
- print(f"Connected to {args.uri}.")
-
- stop = threading.Event()
-
- # Start the thread that reads messages from the connection.
- thread = threading.Thread(target=print_incoming_messages, args=(websocket, stop))
- thread.start()
-
- # Read from stdin in the main thread in order to receive signals.
- try:
- while True:
- # Since there's no size limit, put_nowait is identical to put.
- message = input("> ")
- websocket.send(message)
- except (KeyboardInterrupt, EOFError): # ^C, ^D
- stop.set()
- websocket.close()
- print_over_input("Connection closed.")
-
- thread.join()
-
-
-if __name__ == "__main__":
- main()
diff --git a/venv/lib/python3.11/site-packages/websockets/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index 4a3f173..0000000
--- a/venv/lib/python3.11/site-packages/websockets/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/__pycache__/__main__.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/__pycache__/__main__.cpython-311.pyc
deleted file mode 100644
index d859e45..0000000
--- a/venv/lib/python3.11/site-packages/websockets/__pycache__/__main__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/__pycache__/auth.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/__pycache__/auth.cpython-311.pyc
deleted file mode 100644
index 075acb9..0000000
--- a/venv/lib/python3.11/site-packages/websockets/__pycache__/auth.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/__pycache__/client.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/__pycache__/client.cpython-311.pyc
deleted file mode 100644
index 10badfc..0000000
--- a/venv/lib/python3.11/site-packages/websockets/__pycache__/client.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/__pycache__/connection.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/__pycache__/connection.cpython-311.pyc
deleted file mode 100644
index 8d509f6..0000000
--- a/venv/lib/python3.11/site-packages/websockets/__pycache__/connection.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/__pycache__/datastructures.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/__pycache__/datastructures.cpython-311.pyc
deleted file mode 100644
index 1f52203..0000000
--- a/venv/lib/python3.11/site-packages/websockets/__pycache__/datastructures.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/__pycache__/exceptions.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/__pycache__/exceptions.cpython-311.pyc
deleted file mode 100644
index 9844f32..0000000
--- a/venv/lib/python3.11/site-packages/websockets/__pycache__/exceptions.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/__pycache__/frames.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/__pycache__/frames.cpython-311.pyc
deleted file mode 100644
index e371851..0000000
--- a/venv/lib/python3.11/site-packages/websockets/__pycache__/frames.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/__pycache__/headers.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/__pycache__/headers.cpython-311.pyc
deleted file mode 100644
index 5a2bc15..0000000
--- a/venv/lib/python3.11/site-packages/websockets/__pycache__/headers.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/__pycache__/http.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/__pycache__/http.cpython-311.pyc
deleted file mode 100644
index f3ab8a8..0000000
--- a/venv/lib/python3.11/site-packages/websockets/__pycache__/http.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/__pycache__/http11.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/__pycache__/http11.cpython-311.pyc
deleted file mode 100644
index 8748a46..0000000
--- a/venv/lib/python3.11/site-packages/websockets/__pycache__/http11.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/__pycache__/imports.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/__pycache__/imports.cpython-311.pyc
deleted file mode 100644
index 9a16107..0000000
--- a/venv/lib/python3.11/site-packages/websockets/__pycache__/imports.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/__pycache__/protocol.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/__pycache__/protocol.cpython-311.pyc
deleted file mode 100644
index dce44c1..0000000
--- a/venv/lib/python3.11/site-packages/websockets/__pycache__/protocol.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/__pycache__/server.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/__pycache__/server.cpython-311.pyc
deleted file mode 100644
index e1a0fdc..0000000
--- a/venv/lib/python3.11/site-packages/websockets/__pycache__/server.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/__pycache__/streams.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/__pycache__/streams.cpython-311.pyc
deleted file mode 100644
index 7afeab8..0000000
--- a/venv/lib/python3.11/site-packages/websockets/__pycache__/streams.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/__pycache__/typing.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/__pycache__/typing.cpython-311.pyc
deleted file mode 100644
index ba3d203..0000000
--- a/venv/lib/python3.11/site-packages/websockets/__pycache__/typing.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/__pycache__/uri.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/__pycache__/uri.cpython-311.pyc
deleted file mode 100644
index bac7091..0000000
--- a/venv/lib/python3.11/site-packages/websockets/__pycache__/uri.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/__pycache__/utils.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/__pycache__/utils.cpython-311.pyc
deleted file mode 100644
index 882fe1c..0000000
--- a/venv/lib/python3.11/site-packages/websockets/__pycache__/utils.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/__pycache__/version.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/__pycache__/version.cpython-311.pyc
deleted file mode 100644
index cee7d09..0000000
--- a/venv/lib/python3.11/site-packages/websockets/__pycache__/version.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/auth.py b/venv/lib/python3.11/site-packages/websockets/auth.py
deleted file mode 100644
index b792e02..0000000
--- a/venv/lib/python3.11/site-packages/websockets/auth.py
+++ /dev/null
@@ -1,6 +0,0 @@
-from __future__ import annotations
-
-# See #940 for why lazy_import isn't used here for backwards compatibility.
-# See #1400 for why listing compatibility imports in __all__ helps PyCharm.
-from .legacy.auth import *
-from .legacy.auth import __all__ # noqa: F401
diff --git a/venv/lib/python3.11/site-packages/websockets/client.py b/venv/lib/python3.11/site-packages/websockets/client.py
deleted file mode 100644
index b2f6220..0000000
--- a/venv/lib/python3.11/site-packages/websockets/client.py
+++ /dev/null
@@ -1,360 +0,0 @@
-from __future__ import annotations
-
-import warnings
-from typing import Any, Generator, List, Optional, Sequence
-
-from .datastructures import Headers, MultipleValuesError
-from .exceptions import (
- InvalidHandshake,
- InvalidHeader,
- InvalidHeaderValue,
- InvalidStatus,
- InvalidUpgrade,
- NegotiationError,
-)
-from .extensions import ClientExtensionFactory, Extension
-from .headers import (
- build_authorization_basic,
- build_extension,
- build_host,
- build_subprotocol,
- parse_connection,
- parse_extension,
- parse_subprotocol,
- parse_upgrade,
-)
-from .http11 import Request, Response
-from .protocol import CLIENT, CONNECTING, OPEN, Protocol, State
-from .typing import (
- ConnectionOption,
- ExtensionHeader,
- LoggerLike,
- Origin,
- Subprotocol,
- UpgradeProtocol,
-)
-from .uri import WebSocketURI
-from .utils import accept_key, generate_key
-
-
-# See #940 for why lazy_import isn't used here for backwards compatibility.
-# See #1400 for why listing compatibility imports in __all__ helps PyCharm.
-from .legacy.client import * # isort:skip # noqa: I001
-from .legacy.client import __all__ as legacy__all__
-
-
-__all__ = ["ClientProtocol"] + legacy__all__
-
-
-class ClientProtocol(Protocol):
- """
- Sans-I/O implementation of a WebSocket client connection.
-
- Args:
- wsuri: URI of the WebSocket server, parsed
- with :func:`~websockets.uri.parse_uri`.
- origin: value of the ``Origin`` header. This is useful when connecting
- to a server that validates the ``Origin`` header to defend against
- Cross-Site WebSocket Hijacking attacks.
- extensions: list of supported extensions, in order in which they
- should be tried.
- subprotocols: list of supported subprotocols, in order of decreasing
- preference.
- state: initial state of the WebSocket connection.
- max_size: maximum size of incoming messages in bytes;
- :obj:`None` disables the limit.
- logger: logger for this connection;
- defaults to ``logging.getLogger("websockets.client")``;
- see the :doc:`logging guide <../../topics/logging>` for details.
-
- """
-
- def __init__(
- self,
- wsuri: WebSocketURI,
- *,
- origin: Optional[Origin] = None,
- extensions: Optional[Sequence[ClientExtensionFactory]] = None,
- subprotocols: Optional[Sequence[Subprotocol]] = None,
- state: State = CONNECTING,
- max_size: Optional[int] = 2**20,
- logger: Optional[LoggerLike] = None,
- ):
- super().__init__(
- side=CLIENT,
- state=state,
- max_size=max_size,
- logger=logger,
- )
- self.wsuri = wsuri
- self.origin = origin
- self.available_extensions = extensions
- self.available_subprotocols = subprotocols
- self.key = generate_key()
-
- def connect(self) -> Request:
- """
- Create a handshake request to open a connection.
-
- You must send the handshake request with :meth:`send_request`.
-
- You can modify it before sending it, for example to add HTTP headers.
-
- Returns:
- Request: WebSocket handshake request event to send to the server.
-
- """
- headers = Headers()
-
- headers["Host"] = build_host(
- self.wsuri.host, self.wsuri.port, self.wsuri.secure
- )
-
- if self.wsuri.user_info:
- headers["Authorization"] = build_authorization_basic(*self.wsuri.user_info)
-
- if self.origin is not None:
- headers["Origin"] = self.origin
-
- headers["Upgrade"] = "websocket"
- headers["Connection"] = "Upgrade"
- headers["Sec-WebSocket-Key"] = self.key
- headers["Sec-WebSocket-Version"] = "13"
-
- if self.available_extensions is not None:
- extensions_header = build_extension(
- [
- (extension_factory.name, extension_factory.get_request_params())
- for extension_factory in self.available_extensions
- ]
- )
- headers["Sec-WebSocket-Extensions"] = extensions_header
-
- if self.available_subprotocols is not None:
- protocol_header = build_subprotocol(self.available_subprotocols)
- headers["Sec-WebSocket-Protocol"] = protocol_header
-
- return Request(self.wsuri.resource_name, headers)
-
- def process_response(self, response: Response) -> None:
- """
- Check a handshake response.
-
- Args:
- request: WebSocket handshake response received from the server.
-
- Raises:
- InvalidHandshake: if the handshake response is invalid.
-
- """
-
- if response.status_code != 101:
- raise InvalidStatus(response)
-
- headers = response.headers
-
- connection: List[ConnectionOption] = sum(
- [parse_connection(value) for value in headers.get_all("Connection")], []
- )
-
- if not any(value.lower() == "upgrade" for value in connection):
- raise InvalidUpgrade(
- "Connection", ", ".join(connection) if connection else None
- )
-
- upgrade: List[UpgradeProtocol] = sum(
- [parse_upgrade(value) for value in headers.get_all("Upgrade")], []
- )
-
- # For compatibility with non-strict implementations, ignore case when
- # checking the Upgrade header. It's supposed to be 'WebSocket'.
- if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"):
- raise InvalidUpgrade("Upgrade", ", ".join(upgrade) if upgrade else None)
-
- try:
- s_w_accept = headers["Sec-WebSocket-Accept"]
- except KeyError as exc:
- raise InvalidHeader("Sec-WebSocket-Accept") from exc
- except MultipleValuesError as exc:
- raise InvalidHeader(
- "Sec-WebSocket-Accept",
- "more than one Sec-WebSocket-Accept header found",
- ) from exc
-
- if s_w_accept != accept_key(self.key):
- raise InvalidHeaderValue("Sec-WebSocket-Accept", s_w_accept)
-
- self.extensions = self.process_extensions(headers)
-
- self.subprotocol = self.process_subprotocol(headers)
-
- def process_extensions(self, headers: Headers) -> List[Extension]:
- """
- Handle the Sec-WebSocket-Extensions HTTP response header.
-
- Check that each extension is supported, as well as its parameters.
-
- :rfc:`6455` leaves the rules up to the specification of each
- extension.
-
- To provide this level of flexibility, for each extension accepted by
- the server, we check for a match with each extension available in the
- client configuration. If no match is found, an exception is raised.
-
- If several variants of the same extension are accepted by the server,
- it may be configured several times, which won't make sense in general.
- Extensions must implement their own requirements. For this purpose,
- the list of previously accepted extensions is provided.
-
- Other requirements, for example related to mandatory extensions or the
- order of extensions, may be implemented by overriding this method.
-
- Args:
- headers: WebSocket handshake response headers.
-
- Returns:
- List[Extension]: List of accepted extensions.
-
- Raises:
- InvalidHandshake: to abort the handshake.
-
- """
- accepted_extensions: List[Extension] = []
-
- extensions = headers.get_all("Sec-WebSocket-Extensions")
-
- if extensions:
- if self.available_extensions is None:
- raise InvalidHandshake("no extensions supported")
-
- parsed_extensions: List[ExtensionHeader] = sum(
- [parse_extension(header_value) for header_value in extensions], []
- )
-
- for name, response_params in parsed_extensions:
- for extension_factory in self.available_extensions:
- # Skip non-matching extensions based on their name.
- if extension_factory.name != name:
- continue
-
- # Skip non-matching extensions based on their params.
- try:
- extension = extension_factory.process_response_params(
- response_params, accepted_extensions
- )
- except NegotiationError:
- continue
-
- # Add matching extension to the final list.
- accepted_extensions.append(extension)
-
- # Break out of the loop once we have a match.
- break
-
- # If we didn't break from the loop, no extension in our list
- # matched what the server sent. Fail the connection.
- else:
- raise NegotiationError(
- f"Unsupported extension: "
- f"name = {name}, params = {response_params}"
- )
-
- return accepted_extensions
-
- def process_subprotocol(self, headers: Headers) -> Optional[Subprotocol]:
- """
- Handle the Sec-WebSocket-Protocol HTTP response header.
-
- If provided, check that it contains exactly one supported subprotocol.
-
- Args:
- headers: WebSocket handshake response headers.
-
- Returns:
- Optional[Subprotocol]: Subprotocol, if one was selected.
-
- """
- subprotocol: Optional[Subprotocol] = None
-
- subprotocols = headers.get_all("Sec-WebSocket-Protocol")
-
- if subprotocols:
- if self.available_subprotocols is None:
- raise InvalidHandshake("no subprotocols supported")
-
- parsed_subprotocols: Sequence[Subprotocol] = sum(
- [parse_subprotocol(header_value) for header_value in subprotocols], []
- )
-
- if len(parsed_subprotocols) > 1:
- subprotocols_display = ", ".join(parsed_subprotocols)
- raise InvalidHandshake(f"multiple subprotocols: {subprotocols_display}")
-
- subprotocol = parsed_subprotocols[0]
-
- if subprotocol not in self.available_subprotocols:
- raise NegotiationError(f"unsupported subprotocol: {subprotocol}")
-
- return subprotocol
-
- def send_request(self, request: Request) -> None:
- """
- Send a handshake request to the server.
-
- Args:
- request: WebSocket handshake request event.
-
- """
- if self.debug:
- self.logger.debug("> GET %s HTTP/1.1", request.path)
- for key, value in request.headers.raw_items():
- self.logger.debug("> %s: %s", key, value)
-
- self.writes.append(request.serialize())
-
- def parse(self) -> Generator[None, None, None]:
- if self.state is CONNECTING:
- try:
- response = yield from Response.parse(
- self.reader.read_line,
- self.reader.read_exact,
- self.reader.read_to_eof,
- )
- except Exception as exc:
- self.handshake_exc = exc
- self.parser = self.discard()
- next(self.parser) # start coroutine
- yield
-
- if self.debug:
- code, phrase = response.status_code, response.reason_phrase
- self.logger.debug("< HTTP/1.1 %d %s", code, phrase)
- for key, value in response.headers.raw_items():
- self.logger.debug("< %s: %s", key, value)
- if response.body is not None:
- self.logger.debug("< [body] (%d bytes)", len(response.body))
-
- try:
- self.process_response(response)
- except InvalidHandshake as exc:
- response._exception = exc
- self.events.append(response)
- self.handshake_exc = exc
- self.parser = self.discard()
- next(self.parser) # start coroutine
- yield
-
- assert self.state is CONNECTING
- self.state = OPEN
- self.events.append(response)
-
- yield from super().parse()
-
-
-class ClientConnection(ClientProtocol):
- def __init__(self, *args: Any, **kwargs: Any) -> None:
- warnings.warn(
- "ClientConnection was renamed to ClientProtocol",
- DeprecationWarning,
- )
- super().__init__(*args, **kwargs)
diff --git a/venv/lib/python3.11/site-packages/websockets/connection.py b/venv/lib/python3.11/site-packages/websockets/connection.py
deleted file mode 100644
index 88bcda1..0000000
--- a/venv/lib/python3.11/site-packages/websockets/connection.py
+++ /dev/null
@@ -1,13 +0,0 @@
from __future__ import annotations

import warnings

# lazy_import doesn't support this use case.
# Re-export the renamed module's public names under their old identifiers so
# that ``from websockets.connection import Connection`` keeps working.
from .protocol import SEND_EOF, Protocol as Connection, Side, State  # noqa: F401


# Emitted at import time: importing this module at all is deprecated.
warnings.warn(
    "websockets.connection was renamed to websockets.protocol "
    "and Connection was renamed to Protocol",
    DeprecationWarning,
)
diff --git a/venv/lib/python3.11/site-packages/websockets/datastructures.py b/venv/lib/python3.11/site-packages/websockets/datastructures.py
deleted file mode 100644
index a0a6484..0000000
--- a/venv/lib/python3.11/site-packages/websockets/datastructures.py
+++ /dev/null
@@ -1,194 +0,0 @@
-from __future__ import annotations
-
-from typing import (
- Any,
- Dict,
- Iterable,
- Iterator,
- List,
- Mapping,
- MutableMapping,
- Protocol,
- Tuple,
- Union,
-)
-
-
-__all__ = ["Headers", "HeadersLike", "MultipleValuesError"]
-
-
-class MultipleValuesError(LookupError):
- """
- Exception raised when :class:`Headers` has more than one value for a key.
-
- """
-
- def __str__(self) -> str:
- # Implement the same logic as KeyError_str in Objects/exceptions.c.
- if len(self.args) == 1:
- return repr(self.args[0])
- return super().__str__()
-
-
class Headers(MutableMapping[str, str]):
    """
    Case-insensitive, multi-valued container for HTTP headers.

    Header names are matched case-insensitively and may occur several
    times. Internally the headers live in two synchronized structures: a
    list of ``(name, value)`` pairs preserving original order and casing,
    and a dict keyed by lower-cased names for fast lookups.

    Access follows these rules:

    - ``headers[name]`` raises :exc:`KeyError` when the header is absent,
      returns the value when there is exactly one, and raises
      :exc:`MultipleValuesError` when there are several;
    - ``headers[name] = value`` appends a value — it never overwrites;
    - ``del headers[name]`` drops every value for that header (slow).

    As long as no header occurs multiple times, this behaves like
    :class:`dict` with lower-cased keys. :meth:`get_all` and
    :meth:`raw_items` expose the multi-valued view explicitly.

    """

    __slots__ = ["_dict", "_list"]

    def __init__(self, *args: HeadersLike, **kwargs: str) -> None:
        # Like dict, accept an optional "mapping or iterable" argument.
        self._dict: Dict[str, List[str]] = {}
        self._list: List[Tuple[str, str]] = []
        self.update(*args, **kwargs)

    def __str__(self) -> str:
        lines = [f"{name}: {value}\r\n" for name, value in self._list]
        lines.append("\r\n")
        return "".join(lines)

    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self._list!r})"

    def copy(self) -> Headers:
        clone = self.__class__()
        clone._dict = self._dict.copy()
        clone._list = self._list.copy()
        return clone

    def serialize(self) -> bytes:
        # Headers only contain ASCII characters, so the default codec works.
        return str(self).encode()

    # Collection methods

    def __contains__(self, key: object) -> bool:
        if not isinstance(key, str):
            return False
        return key.lower() in self._dict

    def __iter__(self) -> Iterator[str]:
        return iter(self._dict)

    def __len__(self) -> int:
        return len(self._dict)

    # MutableMapping methods

    def __getitem__(self, key: str) -> str:
        values = self._dict[key.lower()]
        if len(values) != 1:
            raise MultipleValuesError(key)
        return values[0]

    def __setitem__(self, key: str, value: str) -> None:
        self._dict.setdefault(key.lower(), []).append(value)
        self._list.append((key, value))

    def __delitem__(self, key: str) -> None:
        lowered = key.lower()
        del self._dict[lowered]
        # O(n) rebuild; deleting HTTP headers is rare enough not to matter.
        self._list = [item for item in self._list if item[0].lower() != lowered]

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Headers):
            return NotImplemented
        return self._dict == other._dict

    def clear(self) -> None:
        """
        Remove all headers.

        """
        self._dict = {}
        self._list = []

    def update(self, *args: HeadersLike, **kwargs: str) -> None:
        """
        Update from a :class:`Headers` instance and/or keyword arguments.

        """
        # Feed Headers arguments through raw_items() so duplicates survive.
        normalized = tuple(
            arg.raw_items() if isinstance(arg, Headers) else arg for arg in args
        )
        super().update(*normalized, **kwargs)

    # Methods for handling multiple values

    def get_all(self, key: str) -> List[str]:
        """
        Return the (possibly empty) list of all values for a header.

        Args:
            key: header name.

        """
        return self._dict.get(key.lower(), [])

    def raw_items(self) -> Iterator[Tuple[str, str]]:
        """
        Return an iterator of all values as ``(name, value)`` pairs.

        """
        return iter(self._list)
-
-
# copy of _typeshed.SupportsKeysAndGetItem.
class SupportsKeysAndGetItem(Protocol):  # pragma: no cover
    """
    Structural type for dict-like objects: a ``keys()`` method yielding
    :class:`str` and ``__getitem__(key: str) -> str``.

    """

    def keys(self) -> Iterable[str]:
        ...

    def __getitem__(self, key: str) -> str:
        ...


HeadersLike = Union[
    Headers,
    Mapping[str, str],
    Iterable[Tuple[str, str]],
    SupportsKeysAndGetItem,
]
"""
Types accepted where :class:`Headers` is expected.

Besides :class:`Headers` itself, this covers dict-like objects and iterables
of pairs where keys and values are both :class:`str`.

"""
diff --git a/venv/lib/python3.11/site-packages/websockets/exceptions.py b/venv/lib/python3.11/site-packages/websockets/exceptions.py
deleted file mode 100644
index f7169e3..0000000
--- a/venv/lib/python3.11/site-packages/websockets/exceptions.py
+++ /dev/null
@@ -1,405 +0,0 @@
-"""
-:mod:`websockets.exceptions` defines the following exception hierarchy:
-
-* :exc:`WebSocketException`
- * :exc:`ConnectionClosed`
- * :exc:`ConnectionClosedError`
- * :exc:`ConnectionClosedOK`
- * :exc:`InvalidHandshake`
- * :exc:`SecurityError`
- * :exc:`InvalidMessage`
- * :exc:`InvalidHeader`
- * :exc:`InvalidHeaderFormat`
- * :exc:`InvalidHeaderValue`
- * :exc:`InvalidOrigin`
- * :exc:`InvalidUpgrade`
- * :exc:`InvalidStatus`
- * :exc:`InvalidStatusCode` (legacy)
- * :exc:`NegotiationError`
- * :exc:`DuplicateParameter`
- * :exc:`InvalidParameterName`
- * :exc:`InvalidParameterValue`
- * :exc:`AbortHandshake`
- * :exc:`RedirectHandshake`
- * :exc:`InvalidState`
- * :exc:`InvalidURI`
- * :exc:`PayloadTooBig`
- * :exc:`ProtocolError`
-
-"""
-
-from __future__ import annotations
-
-import http
-from typing import Optional
-
-from . import datastructures, frames, http11
-from .typing import StatusLike
-
-
# Public API of this module, in the same order as the exception hierarchy
# drawn in the module docstring; WebSocketProtocolError is a deprecated
# alias of ProtocolError kept for backwards compatibility.
__all__ = [
    "WebSocketException",
    "ConnectionClosed",
    "ConnectionClosedError",
    "ConnectionClosedOK",
    "InvalidHandshake",
    "SecurityError",
    "InvalidMessage",
    "InvalidHeader",
    "InvalidHeaderFormat",
    "InvalidHeaderValue",
    "InvalidOrigin",
    "InvalidUpgrade",
    "InvalidStatus",
    "InvalidStatusCode",
    "NegotiationError",
    "DuplicateParameter",
    "InvalidParameterName",
    "InvalidParameterValue",
    "AbortHandshake",
    "RedirectHandshake",
    "InvalidState",
    "InvalidURI",
    "PayloadTooBig",
    "ProtocolError",
    "WebSocketProtocolError",
]
-
-
class WebSocketException(Exception):
    """
    Root of the exception hierarchy defined by websockets.

    """


class ConnectionClosed(WebSocketException):
    """
    Raised when trying to interact with a closed connection.

    Attributes:
        rcvd (Optional[Close]): close frame received, if any; its code and
            reason are available as ``rcvd.code`` and ``rcvd.reason``.
        sent (Optional[Close]): close frame sent, if any; its code and
            reason are available as ``sent.code`` and ``sent.reason``.
        rcvd_then_sent (Optional[bool]): when close frames were both
            received and sent, whether the received one came first, from
            the perspective of this side of the connection.

    """

    def __init__(
        self,
        rcvd: Optional[frames.Close],
        sent: Optional[frames.Close],
        rcvd_then_sent: Optional[bool] = None,
    ) -> None:
        self.rcvd = rcvd
        self.sent = sent
        self.rcvd_then_sent = rcvd_then_sent

    def __str__(self) -> str:
        # rcvd_then_sent is only set when both close frames exist; the
        # asserts document that invariant.
        if self.rcvd is not None and self.sent is not None:
            assert self.rcvd_then_sent is not None
            if self.rcvd_then_sent:
                return f"received {self.rcvd}; then sent {self.sent}"
            return f"sent {self.sent}; then received {self.rcvd}"
        assert self.rcvd_then_sent is None
        if self.rcvd is not None:
            return f"received {self.rcvd}; no close frame sent"
        if self.sent is not None:
            return f"sent {self.sent}; no close frame received"
        return "no close frame received or sent"

    # code and reason attributes are provided for backwards-compatibility

    @property
    def code(self) -> int:
        return (
            frames.CloseCode.ABNORMAL_CLOSURE
            if self.rcvd is None
            else self.rcvd.code
        )

    @property
    def reason(self) -> str:
        return "" if self.rcvd is None else self.rcvd.reason


class ConnectionClosedError(ConnectionClosed):
    """
    Like :exc:`ConnectionClosed`, for abnormal terminations.

    Either a close frame with a code other than 1000 (OK) or 1001 (going
    away) was exchanged, or the closing handshake didn't complete properly.

    """


class ConnectionClosedOK(ConnectionClosed):
    """
    Like :exc:`ConnectionClosed`, for normal terminations.

    A close frame with code 1000 (OK) or 1001 (going away) or without a
    code was received and sent.

    """


class InvalidHandshake(WebSocketException):
    """
    Raised when the WebSocket opening handshake fails.

    """


class SecurityError(InvalidHandshake):
    """
    Raised when a handshake request or response violates a security rule.

    Security limits are hard coded.

    """


class InvalidMessage(InvalidHandshake):
    """
    Raised when a handshake request or response cannot be parsed.

    """


class InvalidHeader(InvalidHandshake):
    """
    Raised when an HTTP header has an invalid format or value.

    """

    def __init__(self, name: str, value: Optional[str] = None) -> None:
        self.name = name
        self.value = value

    def __str__(self) -> str:
        if self.value is None:
            return f"missing {self.name} header"
        if self.value == "":
            return f"empty {self.name} header"
        return f"invalid {self.name} header: {self.value}"


class InvalidHeaderFormat(InvalidHeader):
    """
    Raised when an HTTP header doesn't match its grammar.

    """

    def __init__(self, name: str, error: str, header: str, pos: int) -> None:
        detail = f"{error} at {pos} in {header}"
        super().__init__(name, detail)


class InvalidHeaderValue(InvalidHeader):
    """
    Raised when an HTTP header is well-formed but its value is unacceptable.

    """


class InvalidOrigin(InvalidHeader):
    """
    Raised when the Origin header of a request isn't allowed.

    """

    def __init__(self, origin: Optional[str]) -> None:
        super().__init__("Origin", origin)


class InvalidUpgrade(InvalidHeader):
    """
    Raised when the Upgrade or Connection header is incorrect.

    """


class InvalidStatus(InvalidHandshake):
    """
    Raised when a handshake response rejects the WebSocket upgrade.

    """

    def __init__(self, response: http11.Response) -> None:
        self.response = response

    def __str__(self) -> str:
        status = self.response.status_code
        return f"server rejected WebSocket connection: HTTP {status:d}"


class InvalidStatusCode(InvalidHandshake):
    """
    Raised when a handshake response carries an invalid status code.

    """

    def __init__(self, status_code: int, headers: datastructures.Headers) -> None:
        self.status_code = status_code
        self.headers = headers

    def __str__(self) -> str:
        return f"server rejected WebSocket connection: HTTP {self.status_code}"


class NegotiationError(InvalidHandshake):
    """
    Raised when negotiating an extension fails.

    """


class DuplicateParameter(NegotiationError):
    """
    Raised when an extension header repeats a parameter name.

    """

    def __init__(self, name: str) -> None:
        self.name = name

    def __str__(self) -> str:
        return f"duplicate parameter: {self.name}"


class InvalidParameterName(NegotiationError):
    """
    Raised when an extension header contains an invalid parameter name.

    """

    def __init__(self, name: str) -> None:
        self.name = name

    def __str__(self) -> str:
        return f"invalid parameter name: {self.name}"


class InvalidParameterValue(NegotiationError):
    """
    Raised when an extension header contains an invalid parameter value.

    """

    def __init__(self, name: str, value: Optional[str]) -> None:
        self.name = name
        self.value = value

    def __str__(self) -> str:
        if self.value is None:
            return f"missing value for parameter {self.name}"
        if self.value == "":
            return f"empty value for parameter {self.name}"
        return f"invalid value for parameter {self.name}: {self.value}"


class AbortHandshake(InvalidHandshake):
    """
    Raised to abort the handshake on purpose and return an HTTP response.

    This exception is an implementation detail.

    The public API
    is :meth:`~websockets.server.WebSocketServerProtocol.process_request`.

    Attributes:
        status (~http.HTTPStatus): HTTP status code.
        headers (Headers): HTTP response headers.
        body (bytes): HTTP response body.
    """

    def __init__(
        self,
        status: StatusLike,
        headers: datastructures.HeadersLike,
        body: bytes = b"",
    ) -> None:
        # Coerce plain integers to HTTPStatus automatically.
        self.status = http.HTTPStatus(status)
        self.headers = datastructures.Headers(headers)
        self.body = body

    def __str__(self) -> str:
        return (
            f"HTTP {self.status:d}, "
            f"{len(self.headers)} headers, "
            f"{len(self.body)} bytes"
        )


class RedirectHandshake(InvalidHandshake):
    """
    Raised when a handshake gets redirected.

    This exception is an implementation detail.

    """

    def __init__(self, uri: str) -> None:
        self.uri = uri

    def __str__(self) -> str:
        return f"redirect to {self.uri}"


class InvalidState(WebSocketException, AssertionError):
    """
    Raised when an operation is forbidden in the current state.

    This exception is an implementation detail.

    It should never be raised in normal circumstances.

    """


class InvalidURI(WebSocketException):
    """
    Raised when connecting to a URI that isn't a valid WebSocket URI.

    """

    def __init__(self, uri: str, msg: str) -> None:
        self.uri = uri
        self.msg = msg

    def __str__(self) -> str:
        return f"{self.uri} isn't a valid URI: {self.msg}"


class PayloadTooBig(WebSocketException):
    """
    Raised when a frame's payload exceeds the maximum size.

    """


class ProtocolError(WebSocketException):
    """
    Raised when a frame breaks the protocol.

    """


WebSocketProtocolError = ProtocolError  # for backwards compatibility
diff --git a/venv/lib/python3.11/site-packages/websockets/extensions/__init__.py b/venv/lib/python3.11/site-packages/websockets/extensions/__init__.py
deleted file mode 100644
index 02838b9..0000000
--- a/venv/lib/python3.11/site-packages/websockets/extensions/__init__.py
+++ /dev/null
@@ -1,4 +0,0 @@
# Re-export the extension interfaces defined in .base.
from .base import *


__all__ = ["Extension", "ClientExtensionFactory", "ServerExtensionFactory"]
diff --git a/venv/lib/python3.11/site-packages/websockets/extensions/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/extensions/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index 746fb9a..0000000
--- a/venv/lib/python3.11/site-packages/websockets/extensions/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/extensions/__pycache__/base.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/extensions/__pycache__/base.cpython-311.pyc
deleted file mode 100644
index 1f40641..0000000
--- a/venv/lib/python3.11/site-packages/websockets/extensions/__pycache__/base.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/extensions/__pycache__/permessage_deflate.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/extensions/__pycache__/permessage_deflate.cpython-311.pyc
deleted file mode 100644
index 925006a..0000000
--- a/venv/lib/python3.11/site-packages/websockets/extensions/__pycache__/permessage_deflate.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/extensions/base.py b/venv/lib/python3.11/site-packages/websockets/extensions/base.py
deleted file mode 100644
index 6c481a4..0000000
--- a/venv/lib/python3.11/site-packages/websockets/extensions/base.py
+++ /dev/null
@@ -1,133 +0,0 @@
-from __future__ import annotations
-
-from typing import List, Optional, Sequence, Tuple
-
-from .. import frames
-from ..typing import ExtensionName, ExtensionParameter
-
-
__all__ = ["Extension", "ClientExtensionFactory", "ServerExtensionFactory"]


class Extension:
    """
    Base class for extensions.

    Concrete extensions override :meth:`decode` and :meth:`encode`.

    """

    name: ExtensionName
    """Extension identifier."""

    def decode(
        self,
        frame: frames.Frame,
        *,
        max_size: Optional[int] = None,
    ) -> frames.Frame:
        """
        Decode an incoming frame.

        Args:
            frame (Frame): frame to decode.
            max_size: maximum payload size in bytes.

        Returns:
            Frame: Decoded frame.

        Raises:
            PayloadTooBig: if decoding the payload exceeds ``max_size``.

        """
        raise NotImplementedError

    def encode(self, frame: frames.Frame) -> frames.Frame:
        """
        Encode an outgoing frame.

        Args:
            frame (Frame): frame to encode.

        Returns:
            Frame: Encoded frame.

        """
        raise NotImplementedError


class ClientExtensionFactory:
    """
    Base class for client-side extension factories.

    Concrete factories override :meth:`get_request_params` and
    :meth:`process_response_params`.

    """

    name: ExtensionName
    """Extension identifier."""

    def get_request_params(self) -> List[ExtensionParameter]:
        """
        Build parameters to send to the server for this extension.

        Returns:
            List[ExtensionParameter]: Parameters to send to the server.

        """
        raise NotImplementedError

    def process_response_params(
        self,
        params: Sequence[ExtensionParameter],
        accepted_extensions: Sequence[Extension],
    ) -> Extension:
        """
        Process parameters received from the server.

        Args:
            params (Sequence[ExtensionParameter]): parameters received from
                the server for this extension.
            accepted_extensions (Sequence[Extension]): extensions accepted
                earlier in the negotiation.

        Returns:
            Extension: An extension instance.

        Raises:
            NegotiationError: if parameters aren't acceptable.

        """
        raise NotImplementedError


class ServerExtensionFactory:
    """
    Base class for server-side extension factories.

    Concrete factories override :meth:`process_request_params`.

    """

    name: ExtensionName
    """Extension identifier."""

    def process_request_params(
        self,
        params: Sequence[ExtensionParameter],
        accepted_extensions: Sequence[Extension],
    ) -> Tuple[List[ExtensionParameter], Extension]:
        """
        Process parameters received from the client.

        Args:
            params (Sequence[ExtensionParameter]): parameters received from
                the client for this extension.
            accepted_extensions (Sequence[Extension]): extensions accepted
                earlier in the negotiation.

        Returns:
            Tuple[List[ExtensionParameter], Extension]: To accept the offer,
            parameters to send to the client for this extension and an
            extension instance.

        Raises:
            NegotiationError: to reject the offer, if parameters received
                from the client aren't acceptable.

        """
        raise NotImplementedError
diff --git a/venv/lib/python3.11/site-packages/websockets/extensions/permessage_deflate.py b/venv/lib/python3.11/site-packages/websockets/extensions/permessage_deflate.py
deleted file mode 100644
index b391837..0000000
--- a/venv/lib/python3.11/site-packages/websockets/extensions/permessage_deflate.py
+++ /dev/null
@@ -1,660 +0,0 @@
-from __future__ import annotations
-
-import dataclasses
-import zlib
-from typing import Any, Dict, List, Optional, Sequence, Tuple, Union
-
-from .. import exceptions, frames
-from ..typing import ExtensionName, ExtensionParameter
-from .base import ClientExtensionFactory, Extension, ServerExtensionFactory
-
-
__all__ = [
    "PerMessageDeflate",
    "ClientPerMessageDeflateFactory",
    "enable_client_permessage_deflate",
    "ServerPerMessageDeflateFactory",
    "enable_server_permessage_deflate",
]

# Trailer that deflate appends on Z_SYNC_FLUSH: encode() strips it from
# final frames and decode() re-appends it before decompressing.
_EMPTY_UNCOMPRESSED_BLOCK = b"\x00\x00\xff\xff"

# Acceptable string values for *_max_window_bits parameters: "8" to "15".
_MAX_WINDOW_BITS_VALUES = [str(bits) for bits in range(8, 16)]


class PerMessageDeflate(Extension):
    """
    Per-Message Deflate extension.

    """

    name = ExtensionName("permessage-deflate")

    def __init__(
        self,
        remote_no_context_takeover: bool,
        local_no_context_takeover: bool,
        remote_max_window_bits: int,
        local_max_window_bits: int,
        compress_settings: Optional[Dict[Any, Any]] = None,
    ) -> None:
        """
        Configure the Per-Message Deflate extension.

        Args:
            remote_no_context_takeover: if true, re-create the decompressor
                for each incoming message instead of keeping its context.
            local_no_context_takeover: if true, re-create the compressor for
                each outgoing message instead of keeping its context.
            remote_max_window_bits: LZ77 window size for decoding, 8 to 15.
            local_max_window_bits: LZ77 window size for encoding, 8 to 15.
            compress_settings: extra keyword arguments for
                :func:`zlib.compressobj`, excluding ``wbits``, which is
                derived from ``local_max_window_bits``.

        """
        if compress_settings is None:
            compress_settings = {}

        assert remote_no_context_takeover in [False, True]
        assert local_no_context_takeover in [False, True]
        assert 8 <= remote_max_window_bits <= 15
        assert 8 <= local_max_window_bits <= 15
        assert "wbits" not in compress_settings

        self.remote_no_context_takeover = remote_no_context_takeover
        self.local_no_context_takeover = local_no_context_takeover
        self.remote_max_window_bits = remote_max_window_bits
        self.local_max_window_bits = local_max_window_bits
        self.compress_settings = compress_settings

        # With context takeover, one zlib object persists across messages;
        # negative wbits selects a raw deflate stream without headers.
        if not self.remote_no_context_takeover:
            self.decoder = zlib.decompressobj(wbits=-self.remote_max_window_bits)

        if not self.local_no_context_takeover:
            self.encoder = zlib.compressobj(
                wbits=-self.local_max_window_bits, **self.compress_settings
            )

        # To handle continuation frames properly, we must keep track of
        # whether that initial frame was encoded.
        self.decode_cont_data = False
        # There's no need for self.encode_cont_data because we always encode
        # outgoing frames, so it would always be True.

    def __repr__(self) -> str:
        return (
            f"PerMessageDeflate("
            f"remote_no_context_takeover={self.remote_no_context_takeover}, "
            f"local_no_context_takeover={self.local_no_context_takeover}, "
            f"remote_max_window_bits={self.remote_max_window_bits}, "
            f"local_max_window_bits={self.local_max_window_bits})"
        )

    def decode(
        self,
        frame: frames.Frame,
        *,
        max_size: Optional[int] = None,
    ) -> frames.Frame:
        """
        Decode an incoming frame.

        Args:
            frame: incoming frame.
            max_size: maximum size of the decompressed payload in bytes;
                :obj:`None` disables the limit.

        Raises:
            ProtocolError: if zlib fails to decompress the payload.
            PayloadTooBig: if the decompressed payload exceeds ``max_size``.

        """
        # Skip control frames.
        if frame.opcode in frames.CTRL_OPCODES:
            return frame

        # Handle continuation data frames:
        # - skip if the message isn't encoded
        # - reset "decode continuation data" flag if it's a final frame
        if frame.opcode is frames.OP_CONT:
            if not self.decode_cont_data:
                return frame
            if frame.fin:
                self.decode_cont_data = False

        # Handle text and binary data frames:
        # - skip if the message isn't encoded
        # - unset the rsv1 flag on the first frame of a compressed message
        # - set "decode continuation data" flag if it's a non-final frame
        else:
            if not frame.rsv1:
                return frame
            frame = dataclasses.replace(frame, rsv1=False)
            if not frame.fin:
                self.decode_cont_data = True

            # Re-initialize per-message decoder.
            if self.remote_no_context_takeover:
                self.decoder = zlib.decompressobj(wbits=-self.remote_max_window_bits)

        # Uncompress data. Protect against zip bombs by preventing zlib from
        # decompressing more than max_length bytes (except when the limit is
        # disabled with max_size = None).
        data = frame.data
        if frame.fin:
            data += _EMPTY_UNCOMPRESSED_BLOCK
        max_length = 0 if max_size is None else max_size
        try:
            data = self.decoder.decompress(data, max_length)
        except zlib.error as exc:
            raise exceptions.ProtocolError("decompression failed") from exc
        if self.decoder.unconsumed_tail:
            raise exceptions.PayloadTooBig(f"over size limit (? > {max_size} bytes)")

        # Allow garbage collection of the decoder if it won't be reused.
        if frame.fin and self.remote_no_context_takeover:
            del self.decoder

        return dataclasses.replace(frame, data=data)

    def encode(self, frame: frames.Frame) -> frames.Frame:
        """
        Encode an outgoing frame.

        Args:
            frame: outgoing frame.

        """
        # Skip control frames.
        if frame.opcode in frames.CTRL_OPCODES:
            return frame

        # Since we always encode messages, there's no "encode continuation
        # data" flag similar to "decode continuation data" at this time.

        if frame.opcode is not frames.OP_CONT:
            # Set the rsv1 flag on the first frame of a compressed message.
            frame = dataclasses.replace(frame, rsv1=True)
            # Re-initialize per-message decoder.
            if self.local_no_context_takeover:
                self.encoder = zlib.compressobj(
                    wbits=-self.local_max_window_bits, **self.compress_settings
                )

        # Compress data.
        data = self.encoder.compress(frame.data) + self.encoder.flush(zlib.Z_SYNC_FLUSH)
        if frame.fin and data.endswith(_EMPTY_UNCOMPRESSED_BLOCK):
            data = data[:-4]

        # Allow garbage collection of the encoder if it won't be reused.
        if frame.fin and self.local_no_context_takeover:
            del self.encoder

        return dataclasses.replace(frame, data=data)
-
-
-def _build_parameters(
- server_no_context_takeover: bool,
- client_no_context_takeover: bool,
- server_max_window_bits: Optional[int],
- client_max_window_bits: Optional[Union[int, bool]],
-) -> List[ExtensionParameter]:
- """
- Build a list of ``(name, value)`` pairs for some compression parameters.
-
- """
- params: List[ExtensionParameter] = []
- if server_no_context_takeover:
- params.append(("server_no_context_takeover", None))
- if client_no_context_takeover:
- params.append(("client_no_context_takeover", None))
- if server_max_window_bits:
- params.append(("server_max_window_bits", str(server_max_window_bits)))
- if client_max_window_bits is True: # only in handshake requests
- params.append(("client_max_window_bits", None))
- elif client_max_window_bits:
- params.append(("client_max_window_bits", str(client_max_window_bits)))
- return params
-
-
-def _extract_parameters(
- params: Sequence[ExtensionParameter], *, is_server: bool
-) -> Tuple[bool, bool, Optional[int], Optional[Union[int, bool]]]:
- """
- Extract compression parameters from a list of ``(name, value)`` pairs.
-
- If ``is_server`` is :obj:`True`, ``client_max_window_bits`` may be
- provided without a value. This is only allowed in handshake requests.
-
- """
- server_no_context_takeover: bool = False
- client_no_context_takeover: bool = False
- server_max_window_bits: Optional[int] = None
- client_max_window_bits: Optional[Union[int, bool]] = None
-
- for name, value in params:
- if name == "server_no_context_takeover":
- if server_no_context_takeover:
- raise exceptions.DuplicateParameter(name)
- if value is None:
- server_no_context_takeover = True
- else:
- raise exceptions.InvalidParameterValue(name, value)
-
- elif name == "client_no_context_takeover":
- if client_no_context_takeover:
- raise exceptions.DuplicateParameter(name)
- if value is None:
- client_no_context_takeover = True
- else:
- raise exceptions.InvalidParameterValue(name, value)
-
- elif name == "server_max_window_bits":
- if server_max_window_bits is not None:
- raise exceptions.DuplicateParameter(name)
- if value in _MAX_WINDOW_BITS_VALUES:
- server_max_window_bits = int(value)
- else:
- raise exceptions.InvalidParameterValue(name, value)
-
- elif name == "client_max_window_bits":
- if client_max_window_bits is not None:
- raise exceptions.DuplicateParameter(name)
- if is_server and value is None: # only in handshake requests
- client_max_window_bits = True
- elif value in _MAX_WINDOW_BITS_VALUES:
- client_max_window_bits = int(value)
- else:
- raise exceptions.InvalidParameterValue(name, value)
-
- else:
- raise exceptions.InvalidParameterName(name)
-
- return (
- server_no_context_takeover,
- client_no_context_takeover,
- server_max_window_bits,
- client_max_window_bits,
- )
-
-
class ClientPerMessageDeflateFactory(ClientExtensionFactory):
    """
    Client-side extension factory for the Per-Message Deflate extension.

    Parameters behave as described in `section 7.1 of RFC 7692`_.

    .. _section 7.1 of RFC 7692: https://www.rfc-editor.org/rfc/rfc7692.html#section-7.1

    Set them to :obj:`True` to include them in the negotiation offer without a
    value or to an integer value to include them with this value.

    Args:
        server_no_context_takeover: prevent server from using context takeover.
        client_no_context_takeover: prevent client from using context takeover.
        server_max_window_bits: maximum size of the server's LZ77 sliding window
            in bits, between 8 and 15.
        client_max_window_bits: maximum size of the client's LZ77 sliding window
            in bits, between 8 and 15, or :obj:`True` to indicate support without
            setting a limit.
        compress_settings: additional keyword arguments for :func:`zlib.compressobj`,
            excluding ``wbits``.

    """

    name = ExtensionName("permessage-deflate")

    def __init__(
        self,
        server_no_context_takeover: bool = False,
        client_no_context_takeover: bool = False,
        server_max_window_bits: Optional[int] = None,
        client_max_window_bits: Optional[Union[int, bool]] = True,
        compress_settings: Optional[Dict[str, Any]] = None,
    ) -> None:
        """
        Configure the Per-Message Deflate extension factory.

        Raises:
            ValueError: if a window size is out of range or if
                ``compress_settings`` includes ``wbits``.

        """
        if not (server_max_window_bits is None or 8 <= server_max_window_bits <= 15):
            raise ValueError("server_max_window_bits must be between 8 and 15")
        if not (
            client_max_window_bits is None
            or client_max_window_bits is True
            or 8 <= client_max_window_bits <= 15
        ):
            raise ValueError("client_max_window_bits must be between 8 and 15")
        if compress_settings is not None and "wbits" in compress_settings:
            raise ValueError(
                "compress_settings must not include wbits, "
                "set client_max_window_bits instead"
            )

        self.server_no_context_takeover = server_no_context_takeover
        self.client_no_context_takeover = client_no_context_takeover
        self.server_max_window_bits = server_max_window_bits
        self.client_max_window_bits = client_max_window_bits
        self.compress_settings = compress_settings

    def get_request_params(self) -> List[ExtensionParameter]:
        """
        Build request parameters.

        Returns:
            List[ExtensionParameter]: Parameters to send to the server.

        """
        return _build_parameters(
            self.server_no_context_takeover,
            self.client_no_context_takeover,
            self.server_max_window_bits,
            self.client_max_window_bits,
        )

    def process_response_params(
        self,
        params: Sequence[ExtensionParameter],
        accepted_extensions: Sequence[Extension],
    ) -> PerMessageDeflate:
        """
        Process response parameters.

        Return an extension instance.

        Raises:
            NegotiationError: if the server's response parameters conflict
                with the parameters this factory requested.

        """
        if any(other.name == self.name for other in accepted_extensions):
            raise exceptions.NegotiationError(f"received duplicate {self.name}")

        # Request parameters are available in instance variables.

        # Load response parameters in local variables.
        (
            server_no_context_takeover,
            client_no_context_takeover,
            server_max_window_bits,
            client_max_window_bits,
        ) = _extract_parameters(params, is_server=False)

        # After comparing the request and the response, the final
        # configuration must be available in the local variables.

        # server_no_context_takeover
        #
        #   Req.    Resp.   Result
        #   ------  ------  --------------------------------------------------
        #   False   False   False
        #   False   True    True
        #   True    False   Error!
        #   True    True    True

        if self.server_no_context_takeover:
            if not server_no_context_takeover:
                raise exceptions.NegotiationError("expected server_no_context_takeover")

        # client_no_context_takeover
        #
        #   Req.    Resp.   Result
        #   ------  ------  --------------------------------------------------
        #   False   False   False
        #   False   True    True
        #   True    False   True - must change value
        #   True    True    True

        if self.client_no_context_takeover:
            if not client_no_context_takeover:
                client_no_context_takeover = True

        # server_max_window_bits

        #   Req.    Resp.   Result
        #   ------  ------  --------------------------------------------------
        #   None    None    None
        #   None    8≤M≤15  M
        #   8≤N≤15  None    Error!
        #   8≤N≤15  8≤M≤N   M
        #   8≤N≤15  N<M≤15  Error!

        if self.server_max_window_bits is None:
            pass

        else:
            if server_max_window_bits is None:
                raise exceptions.NegotiationError("expected server_max_window_bits")
            elif server_max_window_bits > self.server_max_window_bits:
                raise exceptions.NegotiationError("unsupported server_max_window_bits")

        # client_max_window_bits

        #   Req.    Resp.   Result
        #   ------  ------  --------------------------------------------------
        #   None    None    None
        #   None    8≤M≤15  Error!
        #   True    None    None
        #   True    8≤M≤15  M
        #   8≤N≤15  None    N - must change value
        #   8≤N≤15  8≤M≤N   M
        #   8≤N≤15  N<M≤15  Error!

        if self.client_max_window_bits is None:
            if client_max_window_bits is not None:
                raise exceptions.NegotiationError("unexpected client_max_window_bits")

        elif self.client_max_window_bits is True:
            pass

        else:
            if client_max_window_bits is None:
                client_max_window_bits = self.client_max_window_bits
            elif client_max_window_bits > self.client_max_window_bits:
                raise exceptions.NegotiationError("unsupported client_max_window_bits")

        return PerMessageDeflate(
            server_no_context_takeover,  # remote_no_context_takeover
            client_no_context_takeover,  # local_no_context_takeover
            server_max_window_bits or 15,  # remote_max_window_bits
            client_max_window_bits or 15,  # local_max_window_bits
            self.compress_settings,
        )
-
-
-def enable_client_permessage_deflate(
- extensions: Optional[Sequence[ClientExtensionFactory]],
-) -> Sequence[ClientExtensionFactory]:
- """
- Enable Per-Message Deflate with default settings in client extensions.
-
- If the extension is already present, perhaps with non-default settings,
- the configuration isn't changed.
-
- """
- if extensions is None:
- extensions = []
- if not any(
- extension_factory.name == ClientPerMessageDeflateFactory.name
- for extension_factory in extensions
- ):
- extensions = list(extensions) + [
- ClientPerMessageDeflateFactory(
- compress_settings={"memLevel": 5},
- )
- ]
- return extensions
-
-
-class ServerPerMessageDeflateFactory(ServerExtensionFactory):
- """
- Server-side extension factory for the Per-Message Deflate extension.
-
- Parameters behave as described in `section 7.1 of RFC 7692`_.
-
- .. _section 7.1 of RFC 7692: https://www.rfc-editor.org/rfc/rfc7692.html#section-7.1
-
- Set them to :obj:`True` to include them in the negotiation offer without a
- value or to an integer value to include them with this value.
-
- Args:
- server_no_context_takeover: prevent server from using context takeover.
- client_no_context_takeover: prevent client from using context takeover.
- server_max_window_bits: maximum size of the server's LZ77 sliding window
- in bits, between 8 and 15.
- client_max_window_bits: maximum size of the client's LZ77 sliding window
- in bits, between 8 and 15.
- compress_settings: additional keyword arguments for :func:`zlib.compressobj`,
- excluding ``wbits``.
- require_client_max_window_bits: do not enable compression at all if
- client doesn't advertise support for ``client_max_window_bits``;
- the default behavior is to enable compression without enforcing
- ``client_max_window_bits``.
-
- """
-
- name = ExtensionName("permessage-deflate")
-
- def __init__(
- self,
- server_no_context_takeover: bool = False,
- client_no_context_takeover: bool = False,
- server_max_window_bits: Optional[int] = None,
- client_max_window_bits: Optional[int] = None,
- compress_settings: Optional[Dict[str, Any]] = None,
- require_client_max_window_bits: bool = False,
- ) -> None:
- """
- Configure the Per-Message Deflate extension factory.
-
- """
- if not (server_max_window_bits is None or 8 <= server_max_window_bits <= 15):
- raise ValueError("server_max_window_bits must be between 8 and 15")
- if not (client_max_window_bits is None or 8 <= client_max_window_bits <= 15):
- raise ValueError("client_max_window_bits must be between 8 and 15")
- if compress_settings is not None and "wbits" in compress_settings:
- raise ValueError(
- "compress_settings must not include wbits, "
- "set server_max_window_bits instead"
- )
- if client_max_window_bits is None and require_client_max_window_bits:
- raise ValueError(
- "require_client_max_window_bits is enabled, "
- "but client_max_window_bits isn't configured"
- )
-
- self.server_no_context_takeover = server_no_context_takeover
- self.client_no_context_takeover = client_no_context_takeover
- self.server_max_window_bits = server_max_window_bits
- self.client_max_window_bits = client_max_window_bits
- self.compress_settings = compress_settings
- self.require_client_max_window_bits = require_client_max_window_bits
-
- def process_request_params(
- self,
- params: Sequence[ExtensionParameter],
- accepted_extensions: Sequence[Extension],
- ) -> Tuple[List[ExtensionParameter], PerMessageDeflate]:
- """
- Process request parameters.
-
- Return response params and an extension instance.
-
- """
- if any(other.name == self.name for other in accepted_extensions):
- raise exceptions.NegotiationError(f"skipped duplicate {self.name}")
-
- # Load request parameters in local variables.
- (
- server_no_context_takeover,
- client_no_context_takeover,
- server_max_window_bits,
- client_max_window_bits,
- ) = _extract_parameters(params, is_server=True)
-
- # Configuration parameters are available in instance variables.
-
- # After comparing the request and the configuration, the response must
- # be available in the local variables.
-
- # server_no_context_takeover
- #
- # Config Req. Resp.
- # ------ ------ --------------------------------------------------
- # False False False
- # False True True
- # True False True - must change value to True
- # True True True
-
- if self.server_no_context_takeover:
- if not server_no_context_takeover:
- server_no_context_takeover = True
-
- # client_no_context_takeover
- #
- # Config Req. Resp.
- # ------ ------ --------------------------------------------------
- # False False False
- # False True True (or False)
- # True False True - must change value to True
- # True True True (or False)
-
- if self.client_no_context_takeover:
- if not client_no_context_takeover:
- client_no_context_takeover = True
-
- # server_max_window_bits
-
- # Config Req. Resp.
- # ------ ------ --------------------------------------------------
- # None None None
- # None 8≤M≤15 M
- # 8≤N≤15 None N - must change value
- # 8≤N≤15 8≤M≤N M
- # 8≤N≤15 N<M≤15 N - must change value
-
- if self.server_max_window_bits is None:
- pass
-
- else:
- if server_max_window_bits is None:
- server_max_window_bits = self.server_max_window_bits
- elif server_max_window_bits > self.server_max_window_bits:
- server_max_window_bits = self.server_max_window_bits
-
- # client_max_window_bits
-
- # Config Req. Resp.
- # ------ ------ --------------------------------------------------
- # None None None
- # None True None - must change value
- # None 8≤M≤15 M (or None)
- # 8≤N≤15 None None or Error!
- # 8≤N≤15 True N - must change value
- # 8≤N≤15 8≤M≤N M (or None)
- # 8≤N≤15 N<M≤15 N
-
- if self.client_max_window_bits is None:
- if client_max_window_bits is True:
- client_max_window_bits = self.client_max_window_bits
-
- else:
- if client_max_window_bits is None:
- if self.require_client_max_window_bits:
- raise exceptions.NegotiationError("required client_max_window_bits")
- elif client_max_window_bits is True:
- client_max_window_bits = self.client_max_window_bits
- elif self.client_max_window_bits < client_max_window_bits:
- client_max_window_bits = self.client_max_window_bits
-
- return (
- _build_parameters(
- server_no_context_takeover,
- client_no_context_takeover,
- server_max_window_bits,
- client_max_window_bits,
- ),
- PerMessageDeflate(
- client_no_context_takeover, # remote_no_context_takeover
- server_no_context_takeover, # local_no_context_takeover
- client_max_window_bits or 15, # remote_max_window_bits
- server_max_window_bits or 15, # local_max_window_bits
- self.compress_settings,
- ),
- )
-
-
-def enable_server_permessage_deflate(
- extensions: Optional[Sequence[ServerExtensionFactory]],
-) -> Sequence[ServerExtensionFactory]:
- """
- Enable Per-Message Deflate with default settings in server extensions.
-
- If the extension is already present, perhaps with non-default settings,
- the configuration isn't changed.
-
- """
- if extensions is None:
- extensions = []
- if not any(
- ext_factory.name == ServerPerMessageDeflateFactory.name
- for ext_factory in extensions
- ):
- extensions = list(extensions) + [
- ServerPerMessageDeflateFactory(
- server_max_window_bits=12,
- client_max_window_bits=12,
- compress_settings={"memLevel": 5},
- )
- ]
- return extensions
diff --git a/venv/lib/python3.11/site-packages/websockets/frames.py b/venv/lib/python3.11/site-packages/websockets/frames.py
deleted file mode 100644
index 6b1befb..0000000
--- a/venv/lib/python3.11/site-packages/websockets/frames.py
+++ /dev/null
@@ -1,470 +0,0 @@
-from __future__ import annotations
-
-import dataclasses
-import enum
-import io
-import secrets
-import struct
-from typing import Callable, Generator, Optional, Sequence, Tuple
-
-from . import exceptions, extensions
-from .typing import Data
-
-
-try:
- from .speedups import apply_mask
-except ImportError:
- from .utils import apply_mask
-
-
-__all__ = [
- "Opcode",
- "OP_CONT",
- "OP_TEXT",
- "OP_BINARY",
- "OP_CLOSE",
- "OP_PING",
- "OP_PONG",
- "DATA_OPCODES",
- "CTRL_OPCODES",
- "Frame",
- "prepare_data",
- "prepare_ctrl",
- "Close",
-]
-
-
-class Opcode(enum.IntEnum):
- """Opcode values for WebSocket frames."""
-
- CONT, TEXT, BINARY = 0x00, 0x01, 0x02
- CLOSE, PING, PONG = 0x08, 0x09, 0x0A
-
-
-OP_CONT = Opcode.CONT
-OP_TEXT = Opcode.TEXT
-OP_BINARY = Opcode.BINARY
-OP_CLOSE = Opcode.CLOSE
-OP_PING = Opcode.PING
-OP_PONG = Opcode.PONG
-
-DATA_OPCODES = OP_CONT, OP_TEXT, OP_BINARY
-CTRL_OPCODES = OP_CLOSE, OP_PING, OP_PONG
-
-
-class CloseCode(enum.IntEnum):
- """Close code values for WebSocket close frames."""
-
- NORMAL_CLOSURE = 1000
- GOING_AWAY = 1001
- PROTOCOL_ERROR = 1002
- UNSUPPORTED_DATA = 1003
- # 1004 is reserved
- NO_STATUS_RCVD = 1005
- ABNORMAL_CLOSURE = 1006
- INVALID_DATA = 1007
- POLICY_VIOLATION = 1008
- MESSAGE_TOO_BIG = 1009
- MANDATORY_EXTENSION = 1010
- INTERNAL_ERROR = 1011
- SERVICE_RESTART = 1012
- TRY_AGAIN_LATER = 1013
- BAD_GATEWAY = 1014
- TLS_HANDSHAKE = 1015
-
-
-# See https://www.iana.org/assignments/websocket/websocket.xhtml
-CLOSE_CODE_EXPLANATIONS: dict[int, str] = {
- CloseCode.NORMAL_CLOSURE: "OK",
- CloseCode.GOING_AWAY: "going away",
- CloseCode.PROTOCOL_ERROR: "protocol error",
- CloseCode.UNSUPPORTED_DATA: "unsupported data",
- CloseCode.NO_STATUS_RCVD: "no status received [internal]",
- CloseCode.ABNORMAL_CLOSURE: "abnormal closure [internal]",
- CloseCode.INVALID_DATA: "invalid frame payload data",
- CloseCode.POLICY_VIOLATION: "policy violation",
- CloseCode.MESSAGE_TOO_BIG: "message too big",
- CloseCode.MANDATORY_EXTENSION: "mandatory extension",
- CloseCode.INTERNAL_ERROR: "internal error",
- CloseCode.SERVICE_RESTART: "service restart",
- CloseCode.TRY_AGAIN_LATER: "try again later",
- CloseCode.BAD_GATEWAY: "bad gateway",
- CloseCode.TLS_HANDSHAKE: "TLS handshake failure [internal]",
-}
-
-
-# Close code that are allowed in a close frame.
-# Using a set optimizes `code in EXTERNAL_CLOSE_CODES`.
-EXTERNAL_CLOSE_CODES = {
- CloseCode.NORMAL_CLOSURE,
- CloseCode.GOING_AWAY,
- CloseCode.PROTOCOL_ERROR,
- CloseCode.UNSUPPORTED_DATA,
- CloseCode.INVALID_DATA,
- CloseCode.POLICY_VIOLATION,
- CloseCode.MESSAGE_TOO_BIG,
- CloseCode.MANDATORY_EXTENSION,
- CloseCode.INTERNAL_ERROR,
- CloseCode.SERVICE_RESTART,
- CloseCode.TRY_AGAIN_LATER,
- CloseCode.BAD_GATEWAY,
-}
-
-
-OK_CLOSE_CODES = {
- CloseCode.NORMAL_CLOSURE,
- CloseCode.GOING_AWAY,
- CloseCode.NO_STATUS_RCVD,
-}
-
-
-BytesLike = bytes, bytearray, memoryview
-
-
-@dataclasses.dataclass
-class Frame:
- """
- WebSocket frame.
-
- Attributes:
- opcode: Opcode.
- data: Payload data.
- fin: FIN bit.
- rsv1: RSV1 bit.
- rsv2: RSV2 bit.
- rsv3: RSV3 bit.
-
- Only these fields are needed. The MASK bit, payload length and masking-key
- are handled on the fly when parsing and serializing frames.
-
- """
-
- opcode: Opcode
- data: bytes
- fin: bool = True
- rsv1: bool = False
- rsv2: bool = False
- rsv3: bool = False
-
- def __str__(self) -> str:
- """
- Return a human-readable representation of a frame.
-
- """
- coding = None
- length = f"{len(self.data)} byte{'' if len(self.data) == 1 else 's'}"
- non_final = "" if self.fin else "continued"
-
- if self.opcode is OP_TEXT:
- # Decoding only the beginning and the end is needlessly hard.
- # Decode the entire payload then elide later if necessary.
- data = repr(self.data.decode())
- elif self.opcode is OP_BINARY:
- # We'll show at most the first 16 bytes and the last 8 bytes.
- # Encode just what we need, plus two dummy bytes to elide later.
- binary = self.data
- if len(binary) > 25:
- binary = b"".join([binary[:16], b"\x00\x00", binary[-8:]])
- data = " ".join(f"{byte:02x}" for byte in binary)
- elif self.opcode is OP_CLOSE:
- data = str(Close.parse(self.data))
- elif self.data:
- # We don't know if a Continuation frame contains text or binary.
- # Ping and Pong frames could contain UTF-8.
- # Attempt to decode as UTF-8 and display it as text; fallback to
- # binary. If self.data is a memoryview, it has no decode() method,
- # which raises AttributeError.
- try:
- data = repr(self.data.decode())
- coding = "text"
- except (UnicodeDecodeError, AttributeError):
- binary = self.data
- if len(binary) > 25:
- binary = b"".join([binary[:16], b"\x00\x00", binary[-8:]])
- data = " ".join(f"{byte:02x}" for byte in binary)
- coding = "binary"
- else:
- data = "''"
-
- if len(data) > 75:
- data = data[:48] + "..." + data[-24:]
-
- metadata = ", ".join(filter(None, [coding, length, non_final]))
-
- return f"{self.opcode.name} {data} [{metadata}]"
-
- @classmethod
- def parse(
- cls,
- read_exact: Callable[[int], Generator[None, None, bytes]],
- *,
- mask: bool,
- max_size: Optional[int] = None,
- extensions: Optional[Sequence[extensions.Extension]] = None,
- ) -> Generator[None, None, Frame]:
- """
- Parse a WebSocket frame.
-
- This is a generator-based coroutine.
-
- Args:
- read_exact: generator-based coroutine that reads the requested
- bytes or raises an exception if there isn't enough data.
- mask: whether the frame should be masked i.e. whether the read
- happens on the server side.
- max_size: maximum payload size in bytes.
- extensions: list of extensions, applied in reverse order.
-
- Raises:
- EOFError: if the connection is closed without a full WebSocket frame.
- UnicodeDecodeError: if the frame contains invalid UTF-8.
- PayloadTooBig: if the frame's payload size exceeds ``max_size``.
- ProtocolError: if the frame contains incorrect values.
-
- """
- # Read the header.
- data = yield from read_exact(2)
- head1, head2 = struct.unpack("!BB", data)
-
- # While not Pythonic, this is marginally faster than calling bool().
- fin = True if head1 & 0b10000000 else False
- rsv1 = True if head1 & 0b01000000 else False
- rsv2 = True if head1 & 0b00100000 else False
- rsv3 = True if head1 & 0b00010000 else False
-
- try:
- opcode = Opcode(head1 & 0b00001111)
- except ValueError as exc:
- raise exceptions.ProtocolError("invalid opcode") from exc
-
- if (True if head2 & 0b10000000 else False) != mask:
- raise exceptions.ProtocolError("incorrect masking")
-
- length = head2 & 0b01111111
- if length == 126:
- data = yield from read_exact(2)
- (length,) = struct.unpack("!H", data)
- elif length == 127:
- data = yield from read_exact(8)
- (length,) = struct.unpack("!Q", data)
- if max_size is not None and length > max_size:
- raise exceptions.PayloadTooBig(
- f"over size limit ({length} > {max_size} bytes)"
- )
- if mask:
- mask_bytes = yield from read_exact(4)
-
- # Read the data.
- data = yield from read_exact(length)
- if mask:
- data = apply_mask(data, mask_bytes)
-
- frame = cls(opcode, data, fin, rsv1, rsv2, rsv3)
-
- if extensions is None:
- extensions = []
- for extension in reversed(extensions):
- frame = extension.decode(frame, max_size=max_size)
-
- frame.check()
-
- return frame
-
- def serialize(
- self,
- *,
- mask: bool,
- extensions: Optional[Sequence[extensions.Extension]] = None,
- ) -> bytes:
- """
- Serialize a WebSocket frame.
-
- Args:
- mask: whether the frame should be masked i.e. whether the write
- happens on the client side.
- extensions: list of extensions, applied in order.
-
- Raises:
- ProtocolError: if the frame contains incorrect values.
-
- """
- self.check()
-
- if extensions is None:
- extensions = []
- for extension in extensions:
- self = extension.encode(self)
-
- output = io.BytesIO()
-
- # Prepare the header.
- head1 = (
- (0b10000000 if self.fin else 0)
- | (0b01000000 if self.rsv1 else 0)
- | (0b00100000 if self.rsv2 else 0)
- | (0b00010000 if self.rsv3 else 0)
- | self.opcode
- )
-
- head2 = 0b10000000 if mask else 0
-
- length = len(self.data)
- if length < 126:
- output.write(struct.pack("!BB", head1, head2 | length))
- elif length < 65536:
- output.write(struct.pack("!BBH", head1, head2 | 126, length))
- else:
- output.write(struct.pack("!BBQ", head1, head2 | 127, length))
-
- if mask:
- mask_bytes = secrets.token_bytes(4)
- output.write(mask_bytes)
-
- # Prepare the data.
- if mask:
- data = apply_mask(self.data, mask_bytes)
- else:
- data = self.data
- output.write(data)
-
- return output.getvalue()
-
- def check(self) -> None:
- """
- Check that reserved bits and opcode have acceptable values.
-
- Raises:
- ProtocolError: if a reserved bit or the opcode is invalid.
-
- """
- if self.rsv1 or self.rsv2 or self.rsv3:
- raise exceptions.ProtocolError("reserved bits must be 0")
-
- if self.opcode in CTRL_OPCODES:
- if len(self.data) > 125:
- raise exceptions.ProtocolError("control frame too long")
- if not self.fin:
- raise exceptions.ProtocolError("fragmented control frame")
-
-
-def prepare_data(data: Data) -> Tuple[int, bytes]:
- """
- Convert a string or byte-like object to an opcode and a bytes-like object.
-
- This function is designed for data frames.
-
- If ``data`` is a :class:`str`, return ``OP_TEXT`` and a :class:`bytes`
- object encoding ``data`` in UTF-8.
-
- If ``data`` is a bytes-like object, return ``OP_BINARY`` and a bytes-like
- object.
-
- Raises:
- TypeError: if ``data`` doesn't have a supported type.
-
- """
- if isinstance(data, str):
- return OP_TEXT, data.encode("utf-8")
- elif isinstance(data, BytesLike):
- return OP_BINARY, data
- else:
- raise TypeError("data must be str or bytes-like")
-
-
-def prepare_ctrl(data: Data) -> bytes:
- """
- Convert a string or byte-like object to bytes.
-
- This function is designed for ping and pong frames.
-
- If ``data`` is a :class:`str`, return a :class:`bytes` object encoding
- ``data`` in UTF-8.
-
- If ``data`` is a bytes-like object, return a :class:`bytes` object.
-
- Raises:
- TypeError: if ``data`` doesn't have a supported type.
-
- """
- if isinstance(data, str):
- return data.encode("utf-8")
- elif isinstance(data, BytesLike):
- return bytes(data)
- else:
- raise TypeError("data must be str or bytes-like")
-
-
-@dataclasses.dataclass
-class Close:
- """
- Code and reason for WebSocket close frames.
-
- Attributes:
- code: Close code.
- reason: Close reason.
-
- """
-
- code: int
- reason: str
-
- def __str__(self) -> str:
- """
- Return a human-readable representation of a close code and reason.
-
- """
- if 3000 <= self.code < 4000:
- explanation = "registered"
- elif 4000 <= self.code < 5000:
- explanation = "private use"
- else:
- explanation = CLOSE_CODE_EXPLANATIONS.get(self.code, "unknown")
- result = f"{self.code} ({explanation})"
-
- if self.reason:
- result = f"{result} {self.reason}"
-
- return result
-
- @classmethod
- def parse(cls, data: bytes) -> Close:
- """
- Parse the payload of a close frame.
-
- Args:
- data: payload of the close frame.
-
- Raises:
- ProtocolError: if data is ill-formed.
- UnicodeDecodeError: if the reason isn't valid UTF-8.
-
- """
- if len(data) >= 2:
- (code,) = struct.unpack("!H", data[:2])
- reason = data[2:].decode("utf-8")
- close = cls(code, reason)
- close.check()
- return close
- elif len(data) == 0:
- return cls(CloseCode.NO_STATUS_RCVD, "")
- else:
- raise exceptions.ProtocolError("close frame too short")
-
- def serialize(self) -> bytes:
- """
- Serialize the payload of a close frame.
-
- """
- self.check()
- return struct.pack("!H", self.code) + self.reason.encode("utf-8")
-
- def check(self) -> None:
- """
- Check that the close code has a valid value for a close frame.
-
- Raises:
- ProtocolError: if the close code is invalid.
-
- """
- if not (self.code in EXTERNAL_CLOSE_CODES or 3000 <= self.code < 5000):
- raise exceptions.ProtocolError("invalid status code")
diff --git a/venv/lib/python3.11/site-packages/websockets/headers.py b/venv/lib/python3.11/site-packages/websockets/headers.py
deleted file mode 100644
index 9ae3035..0000000
--- a/venv/lib/python3.11/site-packages/websockets/headers.py
+++ /dev/null
@@ -1,587 +0,0 @@
-from __future__ import annotations
-
-import base64
-import binascii
-import ipaddress
-import re
-from typing import Callable, List, Optional, Sequence, Tuple, TypeVar, cast
-
-from . import exceptions
-from .typing import (
- ConnectionOption,
- ExtensionHeader,
- ExtensionName,
- ExtensionParameter,
- Subprotocol,
- UpgradeProtocol,
-)
-
-
-__all__ = [
- "build_host",
- "parse_connection",
- "parse_upgrade",
- "parse_extension",
- "build_extension",
- "parse_subprotocol",
- "build_subprotocol",
- "validate_subprotocols",
- "build_www_authenticate_basic",
- "parse_authorization_basic",
- "build_authorization_basic",
-]
-
-
-T = TypeVar("T")
-
-
-def build_host(host: str, port: int, secure: bool) -> str:
- """
- Build a ``Host`` header.
-
- """
- # https://www.rfc-editor.org/rfc/rfc3986.html#section-3.2.2
- # IPv6 addresses must be enclosed in brackets.
- try:
- address = ipaddress.ip_address(host)
- except ValueError:
- # host is a hostname
- pass
- else:
- # host is an IP address
- if address.version == 6:
- host = f"[{host}]"
-
- if port != (443 if secure else 80):
- host = f"{host}:{port}"
-
- return host
-
-
-# To avoid a dependency on a parsing library, we implement manually the ABNF
-# described in https://www.rfc-editor.org/rfc/rfc6455.html#section-9.1 and
-# https://www.rfc-editor.org/rfc/rfc7230.html#appendix-B.
-
-
-def peek_ahead(header: str, pos: int) -> Optional[str]:
- """
- Return the next character from ``header`` at the given position.
-
- Return :obj:`None` at the end of ``header``.
-
- We never need to peek more than one character ahead.
-
- """
- return None if pos == len(header) else header[pos]
-
-
-_OWS_re = re.compile(r"[\t ]*")
-
-
-def parse_OWS(header: str, pos: int) -> int:
- """
- Parse optional whitespace from ``header`` at the given position.
-
- Return the new position.
-
- The whitespace itself isn't returned because it isn't significant.
-
- """
- # There's always a match, possibly empty, whose content doesn't matter.
- match = _OWS_re.match(header, pos)
- assert match is not None
- return match.end()
-
-
-_token_re = re.compile(r"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+")
-
-
-def parse_token(header: str, pos: int, header_name: str) -> Tuple[str, int]:
- """
- Parse a token from ``header`` at the given position.
-
- Return the token value and the new position.
-
- Raises:
- InvalidHeaderFormat: on invalid inputs.
-
- """
- match = _token_re.match(header, pos)
- if match is None:
- raise exceptions.InvalidHeaderFormat(header_name, "expected token", header, pos)
- return match.group(), match.end()
-
-
-_quoted_string_re = re.compile(
- r'"(?:[\x09\x20-\x21\x23-\x5b\x5d-\x7e]|\\[\x09\x20-\x7e\x80-\xff])*"'
-)
-
-
-_unquote_re = re.compile(r"\\([\x09\x20-\x7e\x80-\xff])")
-
-
-def parse_quoted_string(header: str, pos: int, header_name: str) -> Tuple[str, int]:
- """
- Parse a quoted string from ``header`` at the given position.
-
- Return the unquoted value and the new position.
-
- Raises:
- InvalidHeaderFormat: on invalid inputs.
-
- """
- match = _quoted_string_re.match(header, pos)
- if match is None:
- raise exceptions.InvalidHeaderFormat(
- header_name, "expected quoted string", header, pos
- )
- return _unquote_re.sub(r"\1", match.group()[1:-1]), match.end()
-
-
-_quotable_re = re.compile(r"[\x09\x20-\x7e\x80-\xff]*")
-
-
-_quote_re = re.compile(r"([\x22\x5c])")
-
-
-def build_quoted_string(value: str) -> str:
- """
- Format ``value`` as a quoted string.
-
- This is the reverse of :func:`parse_quoted_string`.
-
- """
- match = _quotable_re.fullmatch(value)
- if match is None:
- raise ValueError("invalid characters for quoted-string encoding")
- return '"' + _quote_re.sub(r"\\\1", value) + '"'
-
-
-def parse_list(
- parse_item: Callable[[str, int, str], Tuple[T, int]],
- header: str,
- pos: int,
- header_name: str,
-) -> List[T]:
- """
- Parse a comma-separated list from ``header`` at the given position.
-
- This is appropriate for parsing values with the following grammar:
-
- 1#item
-
- ``parse_item`` parses one item.
-
- ``header`` is assumed not to start or end with whitespace.
-
- (This function is designed for parsing an entire header value and
- :func:`~websockets.http.read_headers` strips whitespace from values.)
-
- Return a list of items.
-
- Raises:
- InvalidHeaderFormat: on invalid inputs.
-
- """
- # Per https://www.rfc-editor.org/rfc/rfc7230.html#section-7, "a recipient
- # MUST parse and ignore a reasonable number of empty list elements";
- # hence while loops that remove extra delimiters.
-
- # Remove extra delimiters before the first item.
- while peek_ahead(header, pos) == ",":
- pos = parse_OWS(header, pos + 1)
-
- items = []
- while True:
- # Loop invariant: a item starts at pos in header.
- item, pos = parse_item(header, pos, header_name)
- items.append(item)
- pos = parse_OWS(header, pos)
-
- # We may have reached the end of the header.
- if pos == len(header):
- break
-
- # There must be a delimiter after each element except the last one.
- if peek_ahead(header, pos) == ",":
- pos = parse_OWS(header, pos + 1)
- else:
- raise exceptions.InvalidHeaderFormat(
- header_name, "expected comma", header, pos
- )
-
- # Remove extra delimiters before the next item.
- while peek_ahead(header, pos) == ",":
- pos = parse_OWS(header, pos + 1)
-
- # We may have reached the end of the header.
- if pos == len(header):
- break
-
- # Since we only advance in the header by one character with peek_ahead()
- # or with the end position of a regex match, we can't overshoot the end.
- assert pos == len(header)
-
- return items
-
-
-def parse_connection_option(
- header: str, pos: int, header_name: str
-) -> Tuple[ConnectionOption, int]:
- """
- Parse a Connection option from ``header`` at the given position.
-
- Return the protocol value and the new position.
-
- Raises:
- InvalidHeaderFormat: on invalid inputs.
-
- """
- item, pos = parse_token(header, pos, header_name)
- return cast(ConnectionOption, item), pos
-
-
-def parse_connection(header: str) -> List[ConnectionOption]:
- """
- Parse a ``Connection`` header.
-
- Return a list of HTTP connection options.
-
- Args
- header: value of the ``Connection`` header.
-
- Raises:
- InvalidHeaderFormat: on invalid inputs.
-
- """
- return parse_list(parse_connection_option, header, 0, "Connection")
-
-
-_protocol_re = re.compile(
- r"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+(?:/[-!#$%&\'*+.^_`|~0-9a-zA-Z]+)?"
-)
-
-
-def parse_upgrade_protocol(
- header: str, pos: int, header_name: str
-) -> Tuple[UpgradeProtocol, int]:
- """
- Parse an Upgrade protocol from ``header`` at the given position.
-
- Return the protocol value and the new position.
-
- Raises:
- InvalidHeaderFormat: on invalid inputs.
-
- """
- match = _protocol_re.match(header, pos)
- if match is None:
- raise exceptions.InvalidHeaderFormat(
- header_name, "expected protocol", header, pos
- )
- return cast(UpgradeProtocol, match.group()), match.end()
-
-
-def parse_upgrade(header: str) -> List[UpgradeProtocol]:
- """
- Parse an ``Upgrade`` header.
-
- Return a list of HTTP protocols.
-
- Args:
- header: value of the ``Upgrade`` header.
-
- Raises:
- InvalidHeaderFormat: on invalid inputs.
-
- """
- return parse_list(parse_upgrade_protocol, header, 0, "Upgrade")
-
-
-def parse_extension_item_param(
- header: str, pos: int, header_name: str
-) -> Tuple[ExtensionParameter, int]:
- """
- Parse a single extension parameter from ``header`` at the given position.
-
- Return a ``(name, value)`` pair and the new position.
-
- Raises:
- InvalidHeaderFormat: on invalid inputs.
-
- """
- # Extract parameter name.
- name, pos = parse_token(header, pos, header_name)
- pos = parse_OWS(header, pos)
- # Extract parameter value, if there is one.
- value: Optional[str] = None
- if peek_ahead(header, pos) == "=":
- pos = parse_OWS(header, pos + 1)
- if peek_ahead(header, pos) == '"':
- pos_before = pos # for proper error reporting below
- value, pos = parse_quoted_string(header, pos, header_name)
- # https://www.rfc-editor.org/rfc/rfc6455.html#section-9.1 says:
- # the value after quoted-string unescaping MUST conform to
- # the 'token' ABNF.
- if _token_re.fullmatch(value) is None:
- raise exceptions.InvalidHeaderFormat(
- header_name, "invalid quoted header content", header, pos_before
- )
- else:
- value, pos = parse_token(header, pos, header_name)
- pos = parse_OWS(header, pos)
-
- return (name, value), pos
-
-
-def parse_extension_item(
- header: str, pos: int, header_name: str
-) -> Tuple[ExtensionHeader, int]:
- """
- Parse an extension definition from ``header`` at the given position.
-
- Return an ``(extension name, parameters)`` pair, where ``parameters`` is a
- list of ``(name, value)`` pairs, and the new position.
-
- Raises:
- InvalidHeaderFormat: on invalid inputs.
-
- """
- # Extract extension name.
- name, pos = parse_token(header, pos, header_name)
- pos = parse_OWS(header, pos)
- # Extract all parameters.
- parameters = []
- while peek_ahead(header, pos) == ";":
- pos = parse_OWS(header, pos + 1)
- parameter, pos = parse_extension_item_param(header, pos, header_name)
- parameters.append(parameter)
- return (cast(ExtensionName, name), parameters), pos
-
-
-def parse_extension(header: str) -> List[ExtensionHeader]:
- """
- Parse a ``Sec-WebSocket-Extensions`` header.
-
- Return a list of WebSocket extensions and their parameters in this format::
-
- [
- (
- 'extension name',
- [
- ('parameter name', 'parameter value'),
- ....
- ]
- ),
- ...
- ]
-
- Parameter values are :obj:`None` when no value is provided.
-
- Raises:
- InvalidHeaderFormat: on invalid inputs.
-
- """
- return parse_list(parse_extension_item, header, 0, "Sec-WebSocket-Extensions")
-
-
-parse_extension_list = parse_extension # alias for backwards compatibility
-
-
-def build_extension_item(
- name: ExtensionName, parameters: List[ExtensionParameter]
-) -> str:
- """
- Build an extension definition.
-
- This is the reverse of :func:`parse_extension_item`.
-
- """
- return "; ".join(
- [cast(str, name)]
- + [
- # Quoted strings aren't necessary because values are always tokens.
- name if value is None else f"{name}={value}"
- for name, value in parameters
- ]
- )
-
-
-def build_extension(extensions: Sequence[ExtensionHeader]) -> str:
- """
- Build a ``Sec-WebSocket-Extensions`` header.
-
- This is the reverse of :func:`parse_extension`.
-
- """
- return ", ".join(
- build_extension_item(name, parameters) for name, parameters in extensions
- )
-
-
-build_extension_list = build_extension # alias for backwards compatibility
-
-
-def parse_subprotocol_item(
- header: str, pos: int, header_name: str
-) -> Tuple[Subprotocol, int]:
- """
- Parse a subprotocol from ``header`` at the given position.
-
- Return the subprotocol value and the new position.
-
- Raises:
- InvalidHeaderFormat: on invalid inputs.
-
- """
- item, pos = parse_token(header, pos, header_name)
- return cast(Subprotocol, item), pos
-
-
-def parse_subprotocol(header: str) -> List[Subprotocol]:
- """
- Parse a ``Sec-WebSocket-Protocol`` header.
-
- Return a list of WebSocket subprotocols.
-
- Raises:
- InvalidHeaderFormat: on invalid inputs.
-
- """
- return parse_list(parse_subprotocol_item, header, 0, "Sec-WebSocket-Protocol")
-
-
-parse_subprotocol_list = parse_subprotocol # alias for backwards compatibility
-
-
-def build_subprotocol(subprotocols: Sequence[Subprotocol]) -> str:
- """
- Build a ``Sec-WebSocket-Protocol`` header.
-
- This is the reverse of :func:`parse_subprotocol`.
-
- """
- return ", ".join(subprotocols)
-
-
-build_subprotocol_list = build_subprotocol # alias for backwards compatibility
-
-
-def validate_subprotocols(subprotocols: Sequence[Subprotocol]) -> None:
- """
- Validate that ``subprotocols`` is suitable for :func:`build_subprotocol`.
-
- """
- if not isinstance(subprotocols, Sequence):
- raise TypeError("subprotocols must be a list")
- if isinstance(subprotocols, str):
- raise TypeError("subprotocols must be a list, not a str")
- for subprotocol in subprotocols:
- if not _token_re.fullmatch(subprotocol):
- raise ValueError(f"invalid subprotocol: {subprotocol}")
-
-
-def build_www_authenticate_basic(realm: str) -> str:
- """
- Build a ``WWW-Authenticate`` header for HTTP Basic Auth.
-
- Args:
- realm: identifier of the protection space.
-
- """
- # https://www.rfc-editor.org/rfc/rfc7617.html#section-2
- realm = build_quoted_string(realm)
- charset = build_quoted_string("UTF-8")
- return f"Basic realm={realm}, charset={charset}"
-
-
-_token68_re = re.compile(r"[A-Za-z0-9-._~+/]+=*")
-
-
-def parse_token68(header: str, pos: int, header_name: str) -> Tuple[str, int]:
- """
- Parse a token68 from ``header`` at the given position.
-
- Return the token value and the new position.
-
- Raises:
- InvalidHeaderFormat: on invalid inputs.
-
- """
- match = _token68_re.match(header, pos)
- if match is None:
- raise exceptions.InvalidHeaderFormat(
- header_name, "expected token68", header, pos
- )
- return match.group(), match.end()
-
-
-def parse_end(header: str, pos: int, header_name: str) -> None:
- """
- Check that parsing reached the end of header.
-
- """
- if pos < len(header):
- raise exceptions.InvalidHeaderFormat(header_name, "trailing data", header, pos)
-
-
-def parse_authorization_basic(header: str) -> Tuple[str, str]:
- """
- Parse an ``Authorization`` header for HTTP Basic Auth.
-
- Return a ``(username, password)`` tuple.
-
- Args:
- header: value of the ``Authorization`` header.
-
- Raises:
- InvalidHeaderFormat: on invalid inputs.
- InvalidHeaderValue: on unsupported inputs.
-
- """
- # https://www.rfc-editor.org/rfc/rfc7235.html#section-2.1
- # https://www.rfc-editor.org/rfc/rfc7617.html#section-2
- scheme, pos = parse_token(header, 0, "Authorization")
- if scheme.lower() != "basic":
- raise exceptions.InvalidHeaderValue(
- "Authorization",
- f"unsupported scheme: {scheme}",
- )
- if peek_ahead(header, pos) != " ":
- raise exceptions.InvalidHeaderFormat(
- "Authorization", "expected space after scheme", header, pos
- )
- pos += 1
- basic_credentials, pos = parse_token68(header, pos, "Authorization")
- parse_end(header, pos, "Authorization")
-
- try:
- user_pass = base64.b64decode(basic_credentials.encode()).decode()
- except binascii.Error:
- raise exceptions.InvalidHeaderValue(
- "Authorization",
- "expected base64-encoded credentials",
- ) from None
- try:
- username, password = user_pass.split(":", 1)
- except ValueError:
- raise exceptions.InvalidHeaderValue(
- "Authorization",
- "expected username:password credentials",
- ) from None
-
- return username, password
-
-
-def build_authorization_basic(username: str, password: str) -> str:
- """
- Build an ``Authorization`` header for HTTP Basic Auth.
-
- This is the reverse of :func:`parse_authorization_basic`.
-
- """
- # https://www.rfc-editor.org/rfc/rfc7617.html#section-2
- assert ":" not in username
- user_pass = f"{username}:{password}"
- basic_credentials = base64.b64encode(user_pass.encode()).decode()
- return "Basic " + basic_credentials
diff --git a/venv/lib/python3.11/site-packages/websockets/http.py b/venv/lib/python3.11/site-packages/websockets/http.py
deleted file mode 100644
index 9f86f6a..0000000
--- a/venv/lib/python3.11/site-packages/websockets/http.py
+++ /dev/null
@@ -1,35 +0,0 @@
-from __future__ import annotations
-
-import sys
-import typing
-
-from .imports import lazy_import
-from .version import version as websockets_version
-
-
-# For backwards compatibility:
-
-
-# When type checking, import non-deprecated aliases eagerly. Else, import on demand.
-if typing.TYPE_CHECKING:
- from .datastructures import Headers, MultipleValuesError # noqa: F401
-else:
- lazy_import(
- globals(),
- # Headers and MultipleValuesError used to be defined in this module.
- aliases={
- "Headers": ".datastructures",
- "MultipleValuesError": ".datastructures",
- },
- deprecated_aliases={
- "read_request": ".legacy.http",
- "read_response": ".legacy.http",
- },
- )
-
-
-__all__ = ["USER_AGENT"]
-
-
-PYTHON_VERSION = "{}.{}".format(*sys.version_info)
-USER_AGENT = f"Python/{PYTHON_VERSION} websockets/{websockets_version}"
diff --git a/venv/lib/python3.11/site-packages/websockets/http11.py b/venv/lib/python3.11/site-packages/websockets/http11.py
deleted file mode 100644
index ec4e3b8..0000000
--- a/venv/lib/python3.11/site-packages/websockets/http11.py
+++ /dev/null
@@ -1,364 +0,0 @@
-from __future__ import annotations
-
-import dataclasses
-import re
-import warnings
-from typing import Callable, Generator, Optional
-
-from . import datastructures, exceptions
-
-
-# Maximum total size of headers is around 128 * 8 KiB = 1 MiB.
-MAX_HEADERS = 128
-
-# Limit request line and header lines. 8KiB is the most common default
-# configuration of popular HTTP servers.
-MAX_LINE = 8192
-
-# Support for HTTP response bodies is intended to read an error message
-# returned by a server. It isn't designed to perform large file transfers.
-MAX_BODY = 2**20 # 1 MiB
-
-
-def d(value: bytes) -> str:
- """
- Decode a bytestring for interpolating into an error message.
-
- """
- return value.decode(errors="backslashreplace")
-
-
-# See https://www.rfc-editor.org/rfc/rfc7230.html#appendix-B.
-
-# Regex for validating header names.
-
-_token_re = re.compile(rb"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+")
-
-# Regex for validating header values.
-
-# We don't attempt to support obsolete line folding.
-
-# Include HTAB (\x09), SP (\x20), VCHAR (\x21-\x7e), obs-text (\x80-\xff).
-
-# The ABNF is complicated because it attempts to express that optional
-# whitespace is ignored. We strip whitespace and don't revalidate that.
-
-# See also https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189
-
-_value_re = re.compile(rb"[\x09\x20-\x7e\x80-\xff]*")
-
-
-@dataclasses.dataclass
-class Request:
- """
- WebSocket handshake request.
-
- Attributes:
- path: Request path, including optional query.
- headers: Request headers.
- """
-
- path: str
- headers: datastructures.Headers
- # body isn't useful is the context of this library.
-
- _exception: Optional[Exception] = None
-
- @property
- def exception(self) -> Optional[Exception]: # pragma: no cover
- warnings.warn(
- "Request.exception is deprecated; "
- "use ServerProtocol.handshake_exc instead",
- DeprecationWarning,
- )
- return self._exception
-
- @classmethod
- def parse(
- cls,
- read_line: Callable[[int], Generator[None, None, bytes]],
- ) -> Generator[None, None, Request]:
- """
- Parse a WebSocket handshake request.
-
- This is a generator-based coroutine.
-
- The request path isn't URL-decoded or validated in any way.
-
- The request path and headers are expected to contain only ASCII
- characters. Other characters are represented with surrogate escapes.
-
- :meth:`parse` doesn't attempt to read the request body because
- WebSocket handshake requests don't have one. If the request contains a
- body, it may be read from the data stream after :meth:`parse` returns.
-
- Args:
- read_line: generator-based coroutine that reads a LF-terminated
- line or raises an exception if there isn't enough data
-
- Raises:
- EOFError: if the connection is closed without a full HTTP request.
- SecurityError: if the request exceeds a security limit.
- ValueError: if the request isn't well formatted.
-
- """
- # https://www.rfc-editor.org/rfc/rfc7230.html#section-3.1.1
-
- # Parsing is simple because fixed values are expected for method and
- # version and because path isn't checked. Since WebSocket software tends
- # to implement HTTP/1.1 strictly, there's little need for lenient parsing.
-
- try:
- request_line = yield from parse_line(read_line)
- except EOFError as exc:
- raise EOFError("connection closed while reading HTTP request line") from exc
-
- try:
- method, raw_path, version = request_line.split(b" ", 2)
- except ValueError: # not enough values to unpack (expected 3, got 1-2)
- raise ValueError(f"invalid HTTP request line: {d(request_line)}") from None
-
- if method != b"GET":
- raise ValueError(f"unsupported HTTP method: {d(method)}")
- if version != b"HTTP/1.1":
- raise ValueError(f"unsupported HTTP version: {d(version)}")
- path = raw_path.decode("ascii", "surrogateescape")
-
- headers = yield from parse_headers(read_line)
-
- # https://www.rfc-editor.org/rfc/rfc7230.html#section-3.3.3
-
- if "Transfer-Encoding" in headers:
- raise NotImplementedError("transfer codings aren't supported")
-
- if "Content-Length" in headers:
- raise ValueError("unsupported request body")
-
- return cls(path, headers)
-
- def serialize(self) -> bytes:
- """
- Serialize a WebSocket handshake request.
-
- """
- # Since the request line and headers only contain ASCII characters,
- # we can keep this simple.
- request = f"GET {self.path} HTTP/1.1\r\n".encode()
- request += self.headers.serialize()
- return request
-
-
-@dataclasses.dataclass
-class Response:
- """
- WebSocket handshake response.
-
- Attributes:
- status_code: Response code.
- reason_phrase: Response reason.
- headers: Response headers.
- body: Response body, if any.
-
- """
-
- status_code: int
- reason_phrase: str
- headers: datastructures.Headers
- body: Optional[bytes] = None
-
- _exception: Optional[Exception] = None
-
- @property
- def exception(self) -> Optional[Exception]: # pragma: no cover
- warnings.warn(
- "Response.exception is deprecated; "
- "use ClientProtocol.handshake_exc instead",
- DeprecationWarning,
- )
- return self._exception
-
- @classmethod
- def parse(
- cls,
- read_line: Callable[[int], Generator[None, None, bytes]],
- read_exact: Callable[[int], Generator[None, None, bytes]],
- read_to_eof: Callable[[int], Generator[None, None, bytes]],
- ) -> Generator[None, None, Response]:
- """
- Parse a WebSocket handshake response.
-
- This is a generator-based coroutine.
-
- The reason phrase and headers are expected to contain only ASCII
- characters. Other characters are represented with surrogate escapes.
-
- Args:
- read_line: generator-based coroutine that reads a LF-terminated
- line or raises an exception if there isn't enough data.
- read_exact: generator-based coroutine that reads the requested
- bytes or raises an exception if there isn't enough data.
- read_to_eof: generator-based coroutine that reads until the end
- of the stream.
-
- Raises:
- EOFError: if the connection is closed without a full HTTP response.
- SecurityError: if the response exceeds a security limit.
- LookupError: if the response isn't well formatted.
- ValueError: if the response isn't well formatted.
-
- """
- # https://www.rfc-editor.org/rfc/rfc7230.html#section-3.1.2
-
- try:
- status_line = yield from parse_line(read_line)
- except EOFError as exc:
- raise EOFError("connection closed while reading HTTP status line") from exc
-
- try:
- version, raw_status_code, raw_reason = status_line.split(b" ", 2)
- except ValueError: # not enough values to unpack (expected 3, got 1-2)
- raise ValueError(f"invalid HTTP status line: {d(status_line)}") from None
-
- if version != b"HTTP/1.1":
- raise ValueError(f"unsupported HTTP version: {d(version)}")
- try:
- status_code = int(raw_status_code)
- except ValueError: # invalid literal for int() with base 10
- raise ValueError(
- f"invalid HTTP status code: {d(raw_status_code)}"
- ) from None
- if not 100 <= status_code < 1000:
- raise ValueError(f"unsupported HTTP status code: {d(raw_status_code)}")
- if not _value_re.fullmatch(raw_reason):
- raise ValueError(f"invalid HTTP reason phrase: {d(raw_reason)}")
- reason = raw_reason.decode()
-
- headers = yield from parse_headers(read_line)
-
- # https://www.rfc-editor.org/rfc/rfc7230.html#section-3.3.3
-
- if "Transfer-Encoding" in headers:
- raise NotImplementedError("transfer codings aren't supported")
-
- # Since websockets only does GET requests (no HEAD, no CONNECT), all
- # responses except 1xx, 204, and 304 include a message body.
- if 100 <= status_code < 200 or status_code == 204 or status_code == 304:
- body = None
- else:
- content_length: Optional[int]
- try:
- # MultipleValuesError is sufficiently unlikely that we don't
- # attempt to handle it. Instead we document that its parent
- # class, LookupError, may be raised.
- raw_content_length = headers["Content-Length"]
- except KeyError:
- content_length = None
- else:
- content_length = int(raw_content_length)
-
- if content_length is None:
- try:
- body = yield from read_to_eof(MAX_BODY)
- except RuntimeError:
- raise exceptions.SecurityError(
- f"body too large: over {MAX_BODY} bytes"
- )
- elif content_length > MAX_BODY:
- raise exceptions.SecurityError(
- f"body too large: {content_length} bytes"
- )
- else:
- body = yield from read_exact(content_length)
-
- return cls(status_code, reason, headers, body)
-
- def serialize(self) -> bytes:
- """
- Serialize a WebSocket handshake response.
-
- """
- # Since the status line and headers only contain ASCII characters,
- # we can keep this simple.
- response = f"HTTP/1.1 {self.status_code} {self.reason_phrase}\r\n".encode()
- response += self.headers.serialize()
- if self.body is not None:
- response += self.body
- return response
-
-
-def parse_headers(
- read_line: Callable[[int], Generator[None, None, bytes]],
-) -> Generator[None, None, datastructures.Headers]:
- """
- Parse HTTP headers.
-
- Non-ASCII characters are represented with surrogate escapes.
-
- Args:
- read_line: generator-based coroutine that reads a LF-terminated line
- or raises an exception if there isn't enough data.
-
- Raises:
- EOFError: if the connection is closed without complete headers.
- SecurityError: if the request exceeds a security limit.
- ValueError: if the request isn't well formatted.
-
- """
- # https://www.rfc-editor.org/rfc/rfc7230.html#section-3.2
-
- # We don't attempt to support obsolete line folding.
-
- headers = datastructures.Headers()
- for _ in range(MAX_HEADERS + 1):
- try:
- line = yield from parse_line(read_line)
- except EOFError as exc:
- raise EOFError("connection closed while reading HTTP headers") from exc
- if line == b"":
- break
-
- try:
- raw_name, raw_value = line.split(b":", 1)
- except ValueError: # not enough values to unpack (expected 2, got 1)
- raise ValueError(f"invalid HTTP header line: {d(line)}") from None
- if not _token_re.fullmatch(raw_name):
- raise ValueError(f"invalid HTTP header name: {d(raw_name)}")
- raw_value = raw_value.strip(b" \t")
- if not _value_re.fullmatch(raw_value):
- raise ValueError(f"invalid HTTP header value: {d(raw_value)}")
-
- name = raw_name.decode("ascii") # guaranteed to be ASCII at this point
- value = raw_value.decode("ascii", "surrogateescape")
- headers[name] = value
-
- else:
- raise exceptions.SecurityError("too many HTTP headers")
-
- return headers
-
-
-def parse_line(
- read_line: Callable[[int], Generator[None, None, bytes]],
-) -> Generator[None, None, bytes]:
- """
- Parse a single line.
-
- CRLF is stripped from the return value.
-
- Args:
- read_line: generator-based coroutine that reads a LF-terminated line
- or raises an exception if there isn't enough data.
-
- Raises:
- EOFError: if the connection is closed without a CRLF.
- SecurityError: if the response exceeds a security limit.
-
- """
- try:
- line = yield from read_line(MAX_LINE)
- except RuntimeError:
- raise exceptions.SecurityError("line too long")
- # Not mandatory but safe - https://www.rfc-editor.org/rfc/rfc7230.html#section-3.5
- if not line.endswith(b"\r\n"):
- raise EOFError("line without CRLF")
- return line[:-2]
diff --git a/venv/lib/python3.11/site-packages/websockets/imports.py b/venv/lib/python3.11/site-packages/websockets/imports.py
deleted file mode 100644
index a6a59d4..0000000
--- a/venv/lib/python3.11/site-packages/websockets/imports.py
+++ /dev/null
@@ -1,99 +0,0 @@
-from __future__ import annotations
-
-import warnings
-from typing import Any, Dict, Iterable, Optional
-
-
-__all__ = ["lazy_import"]
-
-
-def import_name(name: str, source: str, namespace: Dict[str, Any]) -> Any:
- """
- Import ``name`` from ``source`` in ``namespace``.
-
- There are two use cases:
-
- - ``name`` is an object defined in ``source``;
- - ``name`` is a submodule of ``source``.
-
- Neither :func:`__import__` nor :func:`~importlib.import_module` does
- exactly this. :func:`__import__` is closer to the intended behavior.
-
- """
- level = 0
- while source[level] == ".":
- level += 1
- assert level < len(source), "importing from parent isn't supported"
- module = __import__(source[level:], namespace, None, [name], level)
- return getattr(module, name)
-
-
-def lazy_import(
- namespace: Dict[str, Any],
- aliases: Optional[Dict[str, str]] = None,
- deprecated_aliases: Optional[Dict[str, str]] = None,
-) -> None:
- """
- Provide lazy, module-level imports.
-
- Typical use::
-
- __getattr__, __dir__ = lazy_import(
- globals(),
- aliases={
- "<name>": "<source module>",
- ...
- },
- deprecated_aliases={
- ...,
- }
- )
-
- This function defines ``__getattr__`` and ``__dir__`` per :pep:`562`.
-
- """
- if aliases is None:
- aliases = {}
- if deprecated_aliases is None:
- deprecated_aliases = {}
-
- namespace_set = set(namespace)
- aliases_set = set(aliases)
- deprecated_aliases_set = set(deprecated_aliases)
-
- assert not namespace_set & aliases_set, "namespace conflict"
- assert not namespace_set & deprecated_aliases_set, "namespace conflict"
- assert not aliases_set & deprecated_aliases_set, "namespace conflict"
-
- package = namespace["__name__"]
-
- def __getattr__(name: str) -> Any:
- assert aliases is not None # mypy cannot figure this out
- try:
- source = aliases[name]
- except KeyError:
- pass
- else:
- return import_name(name, source, namespace)
-
- assert deprecated_aliases is not None # mypy cannot figure this out
- try:
- source = deprecated_aliases[name]
- except KeyError:
- pass
- else:
- warnings.warn(
- f"{package}.{name} is deprecated",
- DeprecationWarning,
- stacklevel=2,
- )
- return import_name(name, source, namespace)
-
- raise AttributeError(f"module {package!r} has no attribute {name!r}")
-
- namespace["__getattr__"] = __getattr__
-
- def __dir__() -> Iterable[str]:
- return sorted(namespace_set | aliases_set | deprecated_aliases_set)
-
- namespace["__dir__"] = __dir__
diff --git a/venv/lib/python3.11/site-packages/websockets/legacy/__init__.py b/venv/lib/python3.11/site-packages/websockets/legacy/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/venv/lib/python3.11/site-packages/websockets/legacy/__init__.py
+++ /dev/null
diff --git a/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index 5384f2b..0000000
--- a/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/async_timeout.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/async_timeout.cpython-311.pyc
deleted file mode 100644
index f3e18ca..0000000
--- a/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/async_timeout.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/auth.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/auth.cpython-311.pyc
deleted file mode 100644
index e572023..0000000
--- a/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/auth.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/client.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/client.cpython-311.pyc
deleted file mode 100644
index 8ab650d..0000000
--- a/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/client.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/compatibility.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/compatibility.cpython-311.pyc
deleted file mode 100644
index e65d9ec..0000000
--- a/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/compatibility.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/framing.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/framing.cpython-311.pyc
deleted file mode 100644
index 47c4426..0000000
--- a/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/framing.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/handshake.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/handshake.cpython-311.pyc
deleted file mode 100644
index 0db3ae7..0000000
--- a/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/handshake.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/http.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/http.cpython-311.pyc
deleted file mode 100644
index 2199c73..0000000
--- a/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/http.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/protocol.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/protocol.cpython-311.pyc
deleted file mode 100644
index 002cb1f..0000000
--- a/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/protocol.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/server.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/server.cpython-311.pyc
deleted file mode 100644
index 6319b6e..0000000
--- a/venv/lib/python3.11/site-packages/websockets/legacy/__pycache__/server.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/legacy/async_timeout.py b/venv/lib/python3.11/site-packages/websockets/legacy/async_timeout.py
deleted file mode 100644
index 8264094..0000000
--- a/venv/lib/python3.11/site-packages/websockets/legacy/async_timeout.py
+++ /dev/null
@@ -1,265 +0,0 @@
-# From https://github.com/aio-libs/async-timeout/blob/master/async_timeout/__init__.py
-# Licensed under the Apache License (Apache-2.0)
-
-import asyncio
-import enum
-import sys
-import warnings
-from types import TracebackType
-from typing import Optional, Type
-
-
-# From https://github.com/python/typing_extensions/blob/main/src/typing_extensions.py
-# Licensed under the Python Software Foundation License (PSF-2.0)
-
-if sys.version_info >= (3, 11):
- from typing import final
-else:
- # @final exists in 3.8+, but we backport it for all versions
- # before 3.11 to keep support for the __final__ attribute.
- # See https://bugs.python.org/issue46342
- def final(f):
- """This decorator can be used to indicate to type checkers that
- the decorated method cannot be overridden, and decorated class
- cannot be subclassed. For example:
-
- class Base:
- @final
- def done(self) -> None:
- ...
- class Sub(Base):
- def done(self) -> None: # Error reported by type checker
- ...
- @final
- class Leaf:
- ...
- class Other(Leaf): # Error reported by type checker
- ...
-
- There is no runtime checking of these properties. The decorator
- sets the ``__final__`` attribute to ``True`` on the decorated object
- to allow runtime introspection.
- """
- try:
- f.__final__ = True
- except (AttributeError, TypeError):
- # Skip the attribute silently if it is not writable.
- # AttributeError happens if the object has __slots__ or a
- # read-only property, TypeError if it's a builtin class.
- pass
- return f
-
-
-# End https://github.com/aio-libs/async-timeout/blob/master/async_timeout/__init__.py
-
-__version__ = "4.0.2"
-
-
-__all__ = ("timeout", "timeout_at", "Timeout")
-
-
-def timeout(delay: Optional[float]) -> "Timeout":
- """timeout context manager.
-
- Useful in cases when you want to apply timeout logic around block
- of code or in cases when asyncio.wait_for is not suitable. For example:
-
- >>> async with timeout(0.001):
- ... async with aiohttp.get('https://github.com') as r:
- ... await r.text()
-
-
- delay - value in seconds or None to disable timeout logic
- """
- loop = asyncio.get_running_loop()
- if delay is not None:
- deadline = loop.time() + delay # type: Optional[float]
- else:
- deadline = None
- return Timeout(deadline, loop)
-
-
-def timeout_at(deadline: Optional[float]) -> "Timeout":
- """Schedule the timeout at absolute time.
-
- deadline argument points on the time in the same clock system
- as loop.time().
-
- Please note: it is not POSIX time but a time with
- undefined starting base, e.g. the time of the system power on.
-
- >>> async with timeout_at(loop.time() + 10):
- ... async with aiohttp.get('https://github.com') as r:
- ... await r.text()
-
-
- """
- loop = asyncio.get_running_loop()
- return Timeout(deadline, loop)
-
-
-class _State(enum.Enum):
- INIT = "INIT"
- ENTER = "ENTER"
- TIMEOUT = "TIMEOUT"
- EXIT = "EXIT"
-
-
-@final
-class Timeout:
- # Internal class, please don't instantiate it directly
- # Use timeout() and timeout_at() public factories instead.
- #
- # Implementation note: `async with timeout()` is preferred
- # over `with timeout()`.
- # While technically the Timeout class implementation
- # doesn't need to be async at all,
- # the `async with` statement explicitly points that
- # the context manager should be used from async function context.
- #
- # This design allows to avoid many silly misusages.
- #
- # TimeoutError is raised immediately when scheduled
- # if the deadline is passed.
- # The purpose is to time out as soon as possible
- # without waiting for the next await expression.
-
- __slots__ = ("_deadline", "_loop", "_state", "_timeout_handler")
-
- def __init__(
- self, deadline: Optional[float], loop: asyncio.AbstractEventLoop
- ) -> None:
- self._loop = loop
- self._state = _State.INIT
-
- self._timeout_handler = None # type: Optional[asyncio.Handle]
- if deadline is None:
- self._deadline = None # type: Optional[float]
- else:
- self.update(deadline)
-
- def __enter__(self) -> "Timeout":
- warnings.warn(
- "with timeout() is deprecated, use async with timeout() instead",
- DeprecationWarning,
- stacklevel=2,
- )
- self._do_enter()
- return self
-
- def __exit__(
- self,
- exc_type: Optional[Type[BaseException]],
- exc_val: Optional[BaseException],
- exc_tb: Optional[TracebackType],
- ) -> Optional[bool]:
- self._do_exit(exc_type)
- return None
-
- async def __aenter__(self) -> "Timeout":
- self._do_enter()
- return self
-
- async def __aexit__(
- self,
- exc_type: Optional[Type[BaseException]],
- exc_val: Optional[BaseException],
- exc_tb: Optional[TracebackType],
- ) -> Optional[bool]:
- self._do_exit(exc_type)
- return None
-
- @property
- def expired(self) -> bool:
- """Is timeout expired during execution?"""
- return self._state == _State.TIMEOUT
-
- @property
- def deadline(self) -> Optional[float]:
- return self._deadline
-
- def reject(self) -> None:
- """Reject scheduled timeout if any."""
- # cancel is maybe better name but
- # task.cancel() raises CancelledError in asyncio world.
- if self._state not in (_State.INIT, _State.ENTER):
- raise RuntimeError(f"invalid state {self._state.value}")
- self._reject()
-
- def _reject(self) -> None:
- if self._timeout_handler is not None:
- self._timeout_handler.cancel()
- self._timeout_handler = None
-
- def shift(self, delay: float) -> None:
- """Advance timeout on delay seconds.
-
- The delay can be negative.
-
- Raise RuntimeError if shift is called when deadline is not scheduled
- """
- deadline = self._deadline
- if deadline is None:
- raise RuntimeError("cannot shift timeout if deadline is not scheduled")
- self.update(deadline + delay)
-
- def update(self, deadline: float) -> None:
- """Set deadline to absolute value.
-
- deadline argument points on the time in the same clock system
- as loop.time().
-
- If new deadline is in the past the timeout is raised immediately.
-
- Please note: it is not POSIX time but a time with
- undefined starting base, e.g. the time of the system power on.
- """
- if self._state == _State.EXIT:
- raise RuntimeError("cannot reschedule after exit from context manager")
- if self._state == _State.TIMEOUT:
- raise RuntimeError("cannot reschedule expired timeout")
- if self._timeout_handler is not None:
- self._timeout_handler.cancel()
- self._deadline = deadline
- if self._state != _State.INIT:
- self._reschedule()
-
- def _reschedule(self) -> None:
- assert self._state == _State.ENTER
- deadline = self._deadline
- if deadline is None:
- return
-
- now = self._loop.time()
- if self._timeout_handler is not None:
- self._timeout_handler.cancel()
-
- task = asyncio.current_task()
- if deadline <= now:
- self._timeout_handler = self._loop.call_soon(self._on_timeout, task)
- else:
- self._timeout_handler = self._loop.call_at(deadline, self._on_timeout, task)
-
- def _do_enter(self) -> None:
- if self._state != _State.INIT:
- raise RuntimeError(f"invalid state {self._state.value}")
- self._state = _State.ENTER
- self._reschedule()
-
- def _do_exit(self, exc_type: Optional[Type[BaseException]]) -> None:
- if exc_type is asyncio.CancelledError and self._state == _State.TIMEOUT:
- self._timeout_handler = None
- raise asyncio.TimeoutError
- # timeout has not expired
- self._state = _State.EXIT
- self._reject()
- return None
-
- def _on_timeout(self, task: "asyncio.Task[None]") -> None:
- task.cancel()
- self._state = _State.TIMEOUT
- # drop the reference early
- self._timeout_handler = None
-
-
-# End https://github.com/aio-libs/async-timeout/blob/master/async_timeout/__init__.py
diff --git a/venv/lib/python3.11/site-packages/websockets/legacy/auth.py b/venv/lib/python3.11/site-packages/websockets/legacy/auth.py
deleted file mode 100644
index d342583..0000000
--- a/venv/lib/python3.11/site-packages/websockets/legacy/auth.py
+++ /dev/null
@@ -1,184 +0,0 @@
-from __future__ import annotations
-
-import functools
-import hmac
-import http
-from typing import Any, Awaitable, Callable, Iterable, Optional, Tuple, Union, cast
-
-from ..datastructures import Headers
-from ..exceptions import InvalidHeader
-from ..headers import build_www_authenticate_basic, parse_authorization_basic
-from .server import HTTPResponse, WebSocketServerProtocol
-
-
-__all__ = ["BasicAuthWebSocketServerProtocol", "basic_auth_protocol_factory"]
-
-Credentials = Tuple[str, str]
-
-
-def is_credentials(value: Any) -> bool:
- try:
- username, password = value
- except (TypeError, ValueError):
- return False
- else:
- return isinstance(username, str) and isinstance(password, str)
-
-
-class BasicAuthWebSocketServerProtocol(WebSocketServerProtocol):
- """
- WebSocket server protocol that enforces HTTP Basic Auth.
-
- """
-
- realm: str = ""
- """
- Scope of protection.
-
- If provided, it should contain only ASCII characters because the
- encoding of non-ASCII characters is undefined.
- """
-
- username: Optional[str] = None
- """Username of the authenticated user."""
-
- def __init__(
- self,
- *args: Any,
- realm: Optional[str] = None,
- check_credentials: Optional[Callable[[str, str], Awaitable[bool]]] = None,
- **kwargs: Any,
- ) -> None:
- if realm is not None:
- self.realm = realm # shadow class attribute
- self._check_credentials = check_credentials
- super().__init__(*args, **kwargs)
-
- async def check_credentials(self, username: str, password: str) -> bool:
- """
- Check whether credentials are authorized.
-
- This coroutine may be overridden in a subclass, for example to
- authenticate against a database or an external service.
-
- Args:
- username: HTTP Basic Auth username.
- password: HTTP Basic Auth password.
-
- Returns:
- bool: :obj:`True` if the handshake should continue;
- :obj:`False` if it should fail with an HTTP 401 error.
-
- """
- if self._check_credentials is not None:
- return await self._check_credentials(username, password)
-
- return False
-
- async def process_request(
- self,
- path: str,
- request_headers: Headers,
- ) -> Optional[HTTPResponse]:
- """
- Check HTTP Basic Auth and return an HTTP 401 response if needed.
-
- """
- try:
- authorization = request_headers["Authorization"]
- except KeyError:
- return (
- http.HTTPStatus.UNAUTHORIZED,
- [("WWW-Authenticate", build_www_authenticate_basic(self.realm))],
- b"Missing credentials\n",
- )
-
- try:
- username, password = parse_authorization_basic(authorization)
- except InvalidHeader:
- return (
- http.HTTPStatus.UNAUTHORIZED,
- [("WWW-Authenticate", build_www_authenticate_basic(self.realm))],
- b"Unsupported credentials\n",
- )
-
- if not await self.check_credentials(username, password):
- return (
- http.HTTPStatus.UNAUTHORIZED,
- [("WWW-Authenticate", build_www_authenticate_basic(self.realm))],
- b"Invalid credentials\n",
- )
-
- self.username = username
-
- return await super().process_request(path, request_headers)
-
-
-def basic_auth_protocol_factory(
- realm: Optional[str] = None,
- credentials: Optional[Union[Credentials, Iterable[Credentials]]] = None,
- check_credentials: Optional[Callable[[str, str], Awaitable[bool]]] = None,
- create_protocol: Optional[Callable[..., BasicAuthWebSocketServerProtocol]] = None,
-) -> Callable[..., BasicAuthWebSocketServerProtocol]:
- """
- Protocol factory that enforces HTTP Basic Auth.
-
- :func:`basic_auth_protocol_factory` is designed to integrate with
- :func:`~websockets.server.serve` like this::
-
- websockets.serve(
- ...,
- create_protocol=websockets.basic_auth_protocol_factory(
- realm="my dev server",
- credentials=("hello", "iloveyou"),
- )
- )
-
- Args:
- realm: Scope of protection. It should contain only ASCII characters
- because the encoding of non-ASCII characters is undefined.
- Refer to section 2.2 of :rfc:`7235` for details.
- credentials: Hard coded authorized credentials. It can be a
- ``(username, password)`` pair or a list of such pairs.
- check_credentials: Coroutine that verifies credentials.
- It receives ``username`` and ``password`` arguments
- and returns a :class:`bool`. One of ``credentials`` or
- ``check_credentials`` must be provided but not both.
- create_protocol: Factory that creates the protocol. By default, this
- is :class:`BasicAuthWebSocketServerProtocol`. It can be replaced
- by a subclass.
- Raises:
- TypeError: If the ``credentials`` or ``check_credentials`` argument is
- wrong.
-
- """
- if (credentials is None) == (check_credentials is None):
- raise TypeError("provide either credentials or check_credentials")
-
- if credentials is not None:
- if is_credentials(credentials):
- credentials_list = [cast(Credentials, credentials)]
- elif isinstance(credentials, Iterable):
- credentials_list = list(credentials)
- if not all(is_credentials(item) for item in credentials_list):
- raise TypeError(f"invalid credentials argument: {credentials}")
- else:
- raise TypeError(f"invalid credentials argument: {credentials}")
-
- credentials_dict = dict(credentials_list)
-
- async def check_credentials(username: str, password: str) -> bool:
- try:
- expected_password = credentials_dict[username]
- except KeyError:
- return False
- return hmac.compare_digest(expected_password, password)
-
- if create_protocol is None:
- create_protocol = BasicAuthWebSocketServerProtocol
-
- return functools.partial(
- create_protocol,
- realm=realm,
- check_credentials=check_credentials,
- )
diff --git a/venv/lib/python3.11/site-packages/websockets/legacy/client.py b/venv/lib/python3.11/site-packages/websockets/legacy/client.py
deleted file mode 100644
index 4862252..0000000
--- a/venv/lib/python3.11/site-packages/websockets/legacy/client.py
+++ /dev/null
@@ -1,705 +0,0 @@
-from __future__ import annotations
-
-import asyncio
-import functools
-import logging
-import random
-import urllib.parse
-import warnings
-from types import TracebackType
-from typing import (
- Any,
- AsyncIterator,
- Callable,
- Generator,
- List,
- Optional,
- Sequence,
- Tuple,
- Type,
- cast,
-)
-
-from ..datastructures import Headers, HeadersLike
-from ..exceptions import (
- InvalidHandshake,
- InvalidHeader,
- InvalidMessage,
- InvalidStatusCode,
- NegotiationError,
- RedirectHandshake,
- SecurityError,
-)
-from ..extensions import ClientExtensionFactory, Extension
-from ..extensions.permessage_deflate import enable_client_permessage_deflate
-from ..headers import (
- build_authorization_basic,
- build_extension,
- build_host,
- build_subprotocol,
- parse_extension,
- parse_subprotocol,
- validate_subprotocols,
-)
-from ..http import USER_AGENT
-from ..typing import ExtensionHeader, LoggerLike, Origin, Subprotocol
-from ..uri import WebSocketURI, parse_uri
-from .compatibility import asyncio_timeout
-from .handshake import build_request, check_response
-from .http import read_response
-from .protocol import WebSocketCommonProtocol
-
-
-__all__ = ["connect", "unix_connect", "WebSocketClientProtocol"]
-
-
-class WebSocketClientProtocol(WebSocketCommonProtocol):
- """
- WebSocket client connection.
-
- :class:`WebSocketClientProtocol` provides :meth:`recv` and :meth:`send`
- coroutines for receiving and sending messages.
-
- It supports asynchronous iteration to receive incoming messages::
-
- async for message in websocket:
- await process(message)
-
- The iterator exits normally when the connection is closed with close code
- 1000 (OK) or 1001 (going away) or without a close code. It raises
- a :exc:`~websockets.exceptions.ConnectionClosedError` when the connection
- is closed with any other code.
-
- See :func:`connect` for the documentation of ``logger``, ``origin``,
- ``extensions``, ``subprotocols``, ``extra_headers``, and
- ``user_agent_header``.
-
- See :class:`~websockets.legacy.protocol.WebSocketCommonProtocol` for the
- documentation of ``ping_interval``, ``ping_timeout``, ``close_timeout``,
- ``max_size``, ``max_queue``, ``read_limit``, and ``write_limit``.
-
- """
-
- is_client = True
- side = "client"
-
- def __init__(
- self,
- *,
- logger: Optional[LoggerLike] = None,
- origin: Optional[Origin] = None,
- extensions: Optional[Sequence[ClientExtensionFactory]] = None,
- subprotocols: Optional[Sequence[Subprotocol]] = None,
- extra_headers: Optional[HeadersLike] = None,
- user_agent_header: Optional[str] = USER_AGENT,
- **kwargs: Any,
- ) -> None:
- if logger is None:
- logger = logging.getLogger("websockets.client")
- super().__init__(logger=logger, **kwargs)
- self.origin = origin
- self.available_extensions = extensions
- self.available_subprotocols = subprotocols
- self.extra_headers = extra_headers
- self.user_agent_header = user_agent_header
-
- def write_http_request(self, path: str, headers: Headers) -> None:
- """
- Write request line and headers to the HTTP request.
-
- """
- self.path = path
- self.request_headers = headers
-
- if self.debug:
- self.logger.debug("> GET %s HTTP/1.1", path)
- for key, value in headers.raw_items():
- self.logger.debug("> %s: %s", key, value)
-
- # Since the path and headers only contain ASCII characters,
- # we can keep this simple.
- request = f"GET {path} HTTP/1.1\r\n"
- request += str(headers)
-
- self.transport.write(request.encode())
-
- async def read_http_response(self) -> Tuple[int, Headers]:
- """
- Read status line and headers from the HTTP response.
-
- If the response contains a body, it may be read from ``self.reader``
- after this coroutine returns.
-
- Raises:
- InvalidMessage: If the HTTP message is malformed or isn't an
- HTTP/1.1 GET response.
-
- """
- try:
- status_code, reason, headers = await read_response(self.reader)
- except Exception as exc:
- raise InvalidMessage("did not receive a valid HTTP response") from exc
-
- if self.debug:
- self.logger.debug("< HTTP/1.1 %d %s", status_code, reason)
- for key, value in headers.raw_items():
- self.logger.debug("< %s: %s", key, value)
-
- self.response_headers = headers
-
- return status_code, self.response_headers
-
- @staticmethod
- def process_extensions(
- headers: Headers,
- available_extensions: Optional[Sequence[ClientExtensionFactory]],
- ) -> List[Extension]:
- """
- Handle the Sec-WebSocket-Extensions HTTP response header.
-
- Check that each extension is supported, as well as its parameters.
-
- Return the list of accepted extensions.
-
- Raise :exc:`~websockets.exceptions.InvalidHandshake` to abort the
- connection.
-
- :rfc:`6455` leaves the rules up to the specification of each
- :extension.
-
- To provide this level of flexibility, for each extension accepted by
- the server, we check for a match with each extension available in the
- client configuration. If no match is found, an exception is raised.
-
- If several variants of the same extension are accepted by the server,
- it may be configured several times, which won't make sense in general.
- Extensions must implement their own requirements. For this purpose,
- the list of previously accepted extensions is provided.
-
- Other requirements, for example related to mandatory extensions or the
- order of extensions, may be implemented by overriding this method.
-
- """
- accepted_extensions: List[Extension] = []
-
- header_values = headers.get_all("Sec-WebSocket-Extensions")
-
- if header_values:
- if available_extensions is None:
- raise InvalidHandshake("no extensions supported")
-
- parsed_header_values: List[ExtensionHeader] = sum(
- [parse_extension(header_value) for header_value in header_values], []
- )
-
- for name, response_params in parsed_header_values:
- for extension_factory in available_extensions:
- # Skip non-matching extensions based on their name.
- if extension_factory.name != name:
- continue
-
- # Skip non-matching extensions based on their params.
- try:
- extension = extension_factory.process_response_params(
- response_params, accepted_extensions
- )
- except NegotiationError:
- continue
-
- # Add matching extension to the final list.
- accepted_extensions.append(extension)
-
- # Break out of the loop once we have a match.
- break
-
- # If we didn't break from the loop, no extension in our list
- # matched what the server sent. Fail the connection.
- else:
- raise NegotiationError(
- f"Unsupported extension: "
- f"name = {name}, params = {response_params}"
- )
-
- return accepted_extensions
-
- @staticmethod
- def process_subprotocol(
- headers: Headers, available_subprotocols: Optional[Sequence[Subprotocol]]
- ) -> Optional[Subprotocol]:
- """
- Handle the Sec-WebSocket-Protocol HTTP response header.
-
- Check that it contains exactly one supported subprotocol.
-
- Return the selected subprotocol.
-
- """
- subprotocol: Optional[Subprotocol] = None
-
- header_values = headers.get_all("Sec-WebSocket-Protocol")
-
- if header_values:
- if available_subprotocols is None:
- raise InvalidHandshake("no subprotocols supported")
-
- parsed_header_values: Sequence[Subprotocol] = sum(
- [parse_subprotocol(header_value) for header_value in header_values], []
- )
-
- if len(parsed_header_values) > 1:
- subprotocols = ", ".join(parsed_header_values)
- raise InvalidHandshake(f"multiple subprotocols: {subprotocols}")
-
- subprotocol = parsed_header_values[0]
-
- if subprotocol not in available_subprotocols:
- raise NegotiationError(f"unsupported subprotocol: {subprotocol}")
-
- return subprotocol
-
- async def handshake(
- self,
- wsuri: WebSocketURI,
- origin: Optional[Origin] = None,
- available_extensions: Optional[Sequence[ClientExtensionFactory]] = None,
- available_subprotocols: Optional[Sequence[Subprotocol]] = None,
- extra_headers: Optional[HeadersLike] = None,
- ) -> None:
- """
- Perform the client side of the opening handshake.
-
- Args:
- wsuri: URI of the WebSocket server.
- origin: Value of the ``Origin`` header.
- extensions: List of supported extensions, in order in which they
- should be negotiated and run.
- subprotocols: List of supported subprotocols, in order of decreasing
- preference.
- extra_headers: Arbitrary HTTP headers to add to the handshake request.
-
- Raises:
- InvalidHandshake: If the handshake fails.
-
- """
- request_headers = Headers()
-
- request_headers["Host"] = build_host(wsuri.host, wsuri.port, wsuri.secure)
-
- if wsuri.user_info:
- request_headers["Authorization"] = build_authorization_basic(
- *wsuri.user_info
- )
-
- if origin is not None:
- request_headers["Origin"] = origin
-
- key = build_request(request_headers)
-
- if available_extensions is not None:
- extensions_header = build_extension(
- [
- (extension_factory.name, extension_factory.get_request_params())
- for extension_factory in available_extensions
- ]
- )
- request_headers["Sec-WebSocket-Extensions"] = extensions_header
-
- if available_subprotocols is not None:
- protocol_header = build_subprotocol(available_subprotocols)
- request_headers["Sec-WebSocket-Protocol"] = protocol_header
-
- if self.extra_headers is not None:
- request_headers.update(self.extra_headers)
-
- if self.user_agent_header is not None:
- request_headers.setdefault("User-Agent", self.user_agent_header)
-
- self.write_http_request(wsuri.resource_name, request_headers)
-
- status_code, response_headers = await self.read_http_response()
- if status_code in (301, 302, 303, 307, 308):
- if "Location" not in response_headers:
- raise InvalidHeader("Location")
- raise RedirectHandshake(response_headers["Location"])
- elif status_code != 101:
- raise InvalidStatusCode(status_code, response_headers)
-
- check_response(response_headers, key)
-
- self.extensions = self.process_extensions(
- response_headers, available_extensions
- )
-
- self.subprotocol = self.process_subprotocol(
- response_headers, available_subprotocols
- )
-
- self.connection_open()
-
-
-class Connect:
- """
- Connect to the WebSocket server at ``uri``.
-
- Awaiting :func:`connect` yields a :class:`WebSocketClientProtocol` which
- can then be used to send and receive messages.
-
- :func:`connect` can be used as a asynchronous context manager::
-
- async with websockets.connect(...) as websocket:
- ...
-
- The connection is closed automatically when exiting the context.
-
- :func:`connect` can be used as an infinite asynchronous iterator to
- reconnect automatically on errors::
-
- async for websocket in websockets.connect(...):
- try:
- ...
- except websockets.ConnectionClosed:
- continue
-
- The connection is closed automatically after each iteration of the loop.
-
- If an error occurs while establishing the connection, :func:`connect`
- retries with exponential backoff. The backoff delay starts at three
- seconds and increases up to one minute.
-
- If an error occurs in the body of the loop, you can handle the exception
- and :func:`connect` will reconnect with the next iteration; or you can
- let the exception bubble up and break out of the loop. This lets you
- decide which errors trigger a reconnection and which errors are fatal.
-
- Args:
- uri: URI of the WebSocket server.
- create_protocol: Factory for the :class:`asyncio.Protocol` managing
- the connection. It defaults to :class:`WebSocketClientProtocol`.
- Set it to a wrapper or a subclass to customize connection handling.
- logger: Logger for this client.
- It defaults to ``logging.getLogger("websockets.client")``.
- See the :doc:`logging guide <../../topics/logging>` for details.
- compression: The "permessage-deflate" extension is enabled by default.
- Set ``compression`` to :obj:`None` to disable it. See the
- :doc:`compression guide <../../topics/compression>` for details.
- origin: Value of the ``Origin`` header, for servers that require it.
- extensions: List of supported extensions, in order in which they
- should be negotiated and run.
- subprotocols: List of supported subprotocols, in order of decreasing
- preference.
- extra_headers: Arbitrary HTTP headers to add to the handshake request.
- user_agent_header: Value of the ``User-Agent`` request header.
- It defaults to ``"Python/x.y.z websockets/X.Y"``.
- Setting it to :obj:`None` removes the header.
- open_timeout: Timeout for opening the connection in seconds.
- :obj:`None` disables the timeout.
-
- See :class:`~websockets.legacy.protocol.WebSocketCommonProtocol` for the
- documentation of ``ping_interval``, ``ping_timeout``, ``close_timeout``,
- ``max_size``, ``max_queue``, ``read_limit``, and ``write_limit``.
-
- Any other keyword arguments are passed the event loop's
- :meth:`~asyncio.loop.create_connection` method.
-
- For example:
-
- * You can set ``ssl`` to a :class:`~ssl.SSLContext` to enforce TLS
- settings. When connecting to a ``wss://`` URI, if ``ssl`` isn't
- provided, a TLS context is created
- with :func:`~ssl.create_default_context`.
-
- * You can set ``host`` and ``port`` to connect to a different host and
- port from those found in ``uri``. This only changes the destination of
- the TCP connection. The host name from ``uri`` is still used in the TLS
- handshake for secure connections and in the ``Host`` header.
-
- Raises:
- InvalidURI: If ``uri`` isn't a valid WebSocket URI.
- OSError: If the TCP connection fails.
- InvalidHandshake: If the opening handshake fails.
- ~asyncio.TimeoutError: If the opening handshake times out.
-
- """
-
- MAX_REDIRECTS_ALLOWED = 10
-
- def __init__(
- self,
- uri: str,
- *,
- create_protocol: Optional[Callable[..., WebSocketClientProtocol]] = None,
- logger: Optional[LoggerLike] = None,
- compression: Optional[str] = "deflate",
- origin: Optional[Origin] = None,
- extensions: Optional[Sequence[ClientExtensionFactory]] = None,
- subprotocols: Optional[Sequence[Subprotocol]] = None,
- extra_headers: Optional[HeadersLike] = None,
- user_agent_header: Optional[str] = USER_AGENT,
- open_timeout: Optional[float] = 10,
- ping_interval: Optional[float] = 20,
- ping_timeout: Optional[float] = 20,
- close_timeout: Optional[float] = None,
- max_size: Optional[int] = 2**20,
- max_queue: Optional[int] = 2**5,
- read_limit: int = 2**16,
- write_limit: int = 2**16,
- **kwargs: Any,
- ) -> None:
- # Backwards compatibility: close_timeout used to be called timeout.
- timeout: Optional[float] = kwargs.pop("timeout", None)
- if timeout is None:
- timeout = 10
- else:
- warnings.warn("rename timeout to close_timeout", DeprecationWarning)
- # If both are specified, timeout is ignored.
- if close_timeout is None:
- close_timeout = timeout
-
- # Backwards compatibility: create_protocol used to be called klass.
- klass: Optional[Type[WebSocketClientProtocol]] = kwargs.pop("klass", None)
- if klass is None:
- klass = WebSocketClientProtocol
- else:
- warnings.warn("rename klass to create_protocol", DeprecationWarning)
- # If both are specified, klass is ignored.
- if create_protocol is None:
- create_protocol = klass
-
- # Backwards compatibility: recv() used to return None on closed connections
- legacy_recv: bool = kwargs.pop("legacy_recv", False)
-
- # Backwards compatibility: the loop parameter used to be supported.
- _loop: Optional[asyncio.AbstractEventLoop] = kwargs.pop("loop", None)
- if _loop is None:
- loop = asyncio.get_event_loop()
- else:
- loop = _loop
- warnings.warn("remove loop argument", DeprecationWarning)
-
- wsuri = parse_uri(uri)
- if wsuri.secure:
- kwargs.setdefault("ssl", True)
- elif kwargs.get("ssl") is not None:
- raise ValueError(
- "connect() received a ssl argument for a ws:// URI, "
- "use a wss:// URI to enable TLS"
- )
-
- if compression == "deflate":
- extensions = enable_client_permessage_deflate(extensions)
- elif compression is not None:
- raise ValueError(f"unsupported compression: {compression}")
-
- if subprotocols is not None:
- validate_subprotocols(subprotocols)
-
- factory = functools.partial(
- create_protocol,
- logger=logger,
- origin=origin,
- extensions=extensions,
- subprotocols=subprotocols,
- extra_headers=extra_headers,
- user_agent_header=user_agent_header,
- ping_interval=ping_interval,
- ping_timeout=ping_timeout,
- close_timeout=close_timeout,
- max_size=max_size,
- max_queue=max_queue,
- read_limit=read_limit,
- write_limit=write_limit,
- host=wsuri.host,
- port=wsuri.port,
- secure=wsuri.secure,
- legacy_recv=legacy_recv,
- loop=_loop,
- )
-
- if kwargs.pop("unix", False):
- path: Optional[str] = kwargs.pop("path", None)
- create_connection = functools.partial(
- loop.create_unix_connection, factory, path, **kwargs
- )
- else:
- host: Optional[str]
- port: Optional[int]
- if kwargs.get("sock") is None:
- host, port = wsuri.host, wsuri.port
- else:
- # If sock is given, host and port shouldn't be specified.
- host, port = None, None
- if kwargs.get("ssl"):
- kwargs.setdefault("server_hostname", wsuri.host)
- # If host and port are given, override values from the URI.
- host = kwargs.pop("host", host)
- port = kwargs.pop("port", port)
- create_connection = functools.partial(
- loop.create_connection, factory, host, port, **kwargs
- )
-
- self.open_timeout = open_timeout
- if logger is None:
- logger = logging.getLogger("websockets.client")
- self.logger = logger
-
- # This is a coroutine function.
- self._create_connection = create_connection
- self._uri = uri
- self._wsuri = wsuri
-
- def handle_redirect(self, uri: str) -> None:
- # Update the state of this instance to connect to a new URI.
- old_uri = self._uri
- old_wsuri = self._wsuri
- new_uri = urllib.parse.urljoin(old_uri, uri)
- new_wsuri = parse_uri(new_uri)
-
- # Forbid TLS downgrade.
- if old_wsuri.secure and not new_wsuri.secure:
- raise SecurityError("redirect from WSS to WS")
-
- same_origin = (
- old_wsuri.host == new_wsuri.host and old_wsuri.port == new_wsuri.port
- )
-
- # Rewrite the host and port arguments for cross-origin redirects.
- # This preserves connection overrides with the host and port
- # arguments if the redirect points to the same host and port.
- if not same_origin:
- # Replace the host and port argument passed to the protocol factory.
- factory = self._create_connection.args[0]
- factory = functools.partial(
- factory.func,
- *factory.args,
- **dict(factory.keywords, host=new_wsuri.host, port=new_wsuri.port),
- )
- # Replace the host and port argument passed to create_connection.
- self._create_connection = functools.partial(
- self._create_connection.func,
- *(factory, new_wsuri.host, new_wsuri.port),
- **self._create_connection.keywords,
- )
-
- # Set the new WebSocket URI. This suffices for same-origin redirects.
- self._uri = new_uri
- self._wsuri = new_wsuri
-
- # async for ... in connect(...):
-
- BACKOFF_MIN = 1.92
- BACKOFF_MAX = 60.0
- BACKOFF_FACTOR = 1.618
- BACKOFF_INITIAL = 5
-
- async def __aiter__(self) -> AsyncIterator[WebSocketClientProtocol]:
- backoff_delay = self.BACKOFF_MIN
- while True:
- try:
- async with self as protocol:
- yield protocol
- except Exception:
- # Add a random initial delay between 0 and 5 seconds.
- # See 7.2.3. Recovering from Abnormal Closure in RFC 6544.
- if backoff_delay == self.BACKOFF_MIN:
- initial_delay = random.random() * self.BACKOFF_INITIAL
- self.logger.info(
- "! connect failed; reconnecting in %.1f seconds",
- initial_delay,
- exc_info=True,
- )
- await asyncio.sleep(initial_delay)
- else:
- self.logger.info(
- "! connect failed again; retrying in %d seconds",
- int(backoff_delay),
- exc_info=True,
- )
- await asyncio.sleep(int(backoff_delay))
- # Increase delay with truncated exponential backoff.
- backoff_delay = backoff_delay * self.BACKOFF_FACTOR
- backoff_delay = min(backoff_delay, self.BACKOFF_MAX)
- continue
- else:
- # Connection succeeded - reset backoff delay
- backoff_delay = self.BACKOFF_MIN
-
- # async with connect(...) as ...:
-
- async def __aenter__(self) -> WebSocketClientProtocol:
- return await self
-
- async def __aexit__(
- self,
- exc_type: Optional[Type[BaseException]],
- exc_value: Optional[BaseException],
- traceback: Optional[TracebackType],
- ) -> None:
- await self.protocol.close()
-
- # ... = await connect(...)
-
- def __await__(self) -> Generator[Any, None, WebSocketClientProtocol]:
- # Create a suitable iterator by calling __await__ on a coroutine.
- return self.__await_impl_timeout__().__await__()
-
- async def __await_impl_timeout__(self) -> WebSocketClientProtocol:
- async with asyncio_timeout(self.open_timeout):
- return await self.__await_impl__()
-
- async def __await_impl__(self) -> WebSocketClientProtocol:
- for redirects in range(self.MAX_REDIRECTS_ALLOWED):
- _transport, _protocol = await self._create_connection()
- protocol = cast(WebSocketClientProtocol, _protocol)
- try:
- await protocol.handshake(
- self._wsuri,
- origin=protocol.origin,
- available_extensions=protocol.available_extensions,
- available_subprotocols=protocol.available_subprotocols,
- extra_headers=protocol.extra_headers,
- )
- except RedirectHandshake as exc:
- protocol.fail_connection()
- await protocol.wait_closed()
- self.handle_redirect(exc.uri)
- # Avoid leaking a connected socket when the handshake fails.
- except (Exception, asyncio.CancelledError):
- protocol.fail_connection()
- await protocol.wait_closed()
- raise
- else:
- self.protocol = protocol
- return protocol
- else:
- raise SecurityError("too many redirects")
-
- # ... = yield from connect(...) - remove when dropping Python < 3.10
-
- __iter__ = __await__
-
-
-connect = Connect
-
-
-def unix_connect(
- path: Optional[str] = None,
- uri: str = "ws://localhost/",
- **kwargs: Any,
-) -> Connect:
- """
- Similar to :func:`connect`, but for connecting to a Unix socket.
-
- This function builds upon the event loop's
- :meth:`~asyncio.loop.create_unix_connection` method.
-
- It is only available on Unix.
-
- It's mainly useful for debugging servers listening on Unix sockets.
-
- Args:
- path: File system path to the Unix socket.
- uri: URI of the WebSocket server; the host is used in the TLS
- handshake for secure connections and in the ``Host`` header.
-
- """
- return connect(uri=uri, path=path, unix=True, **kwargs)
diff --git a/venv/lib/python3.11/site-packages/websockets/legacy/compatibility.py b/venv/lib/python3.11/site-packages/websockets/legacy/compatibility.py
deleted file mode 100644
index 6bd01e7..0000000
--- a/venv/lib/python3.11/site-packages/websockets/legacy/compatibility.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from __future__ import annotations
-
-import sys
-
-
-__all__ = ["asyncio_timeout"]
-
-
-if sys.version_info[:2] >= (3, 11):
- from asyncio import timeout as asyncio_timeout # noqa: F401
-else:
- from .async_timeout import timeout as asyncio_timeout # noqa: F401
diff --git a/venv/lib/python3.11/site-packages/websockets/legacy/framing.py b/venv/lib/python3.11/site-packages/websockets/legacy/framing.py
deleted file mode 100644
index b77b869..0000000
--- a/venv/lib/python3.11/site-packages/websockets/legacy/framing.py
+++ /dev/null
@@ -1,176 +0,0 @@
-from __future__ import annotations
-
-import struct
-from typing import Any, Awaitable, Callable, NamedTuple, Optional, Sequence, Tuple
-
-from .. import extensions, frames
-from ..exceptions import PayloadTooBig, ProtocolError
-
-
-try:
- from ..speedups import apply_mask
-except ImportError:
- from ..utils import apply_mask
-
-
-class Frame(NamedTuple):
- fin: bool
- opcode: frames.Opcode
- data: bytes
- rsv1: bool = False
- rsv2: bool = False
- rsv3: bool = False
-
- @property
- def new_frame(self) -> frames.Frame:
- return frames.Frame(
- self.opcode,
- self.data,
- self.fin,
- self.rsv1,
- self.rsv2,
- self.rsv3,
- )
-
- def __str__(self) -> str:
- return str(self.new_frame)
-
- def check(self) -> None:
- return self.new_frame.check()
-
- @classmethod
- async def read(
- cls,
- reader: Callable[[int], Awaitable[bytes]],
- *,
- mask: bool,
- max_size: Optional[int] = None,
- extensions: Optional[Sequence[extensions.Extension]] = None,
- ) -> Frame:
- """
- Read a WebSocket frame.
-
- Args:
- reader: Coroutine that reads exactly the requested number of
- bytes, unless the end of file is reached.
- mask: Whether the frame should be masked i.e. whether the read
- happens on the server side.
- max_size: Maximum payload size in bytes.
- extensions: List of extensions, applied in reverse order.
-
- Raises:
- PayloadTooBig: If the frame exceeds ``max_size``.
- ProtocolError: If the frame contains incorrect values.
-
- """
-
- # Read the header.
- data = await reader(2)
- head1, head2 = struct.unpack("!BB", data)
-
- # While not Pythonic, this is marginally faster than calling bool().
- fin = True if head1 & 0b10000000 else False
- rsv1 = True if head1 & 0b01000000 else False
- rsv2 = True if head1 & 0b00100000 else False
- rsv3 = True if head1 & 0b00010000 else False
-
- try:
- opcode = frames.Opcode(head1 & 0b00001111)
- except ValueError as exc:
- raise ProtocolError("invalid opcode") from exc
-
- if (True if head2 & 0b10000000 else False) != mask:
- raise ProtocolError("incorrect masking")
-
- length = head2 & 0b01111111
- if length == 126:
- data = await reader(2)
- (length,) = struct.unpack("!H", data)
- elif length == 127:
- data = await reader(8)
- (length,) = struct.unpack("!Q", data)
- if max_size is not None and length > max_size:
- raise PayloadTooBig(f"over size limit ({length} > {max_size} bytes)")
- if mask:
- mask_bits = await reader(4)
-
- # Read the data.
- data = await reader(length)
- if mask:
- data = apply_mask(data, mask_bits)
-
- new_frame = frames.Frame(opcode, data, fin, rsv1, rsv2, rsv3)
-
- if extensions is None:
- extensions = []
- for extension in reversed(extensions):
- new_frame = extension.decode(new_frame, max_size=max_size)
-
- new_frame.check()
-
- return cls(
- new_frame.fin,
- new_frame.opcode,
- new_frame.data,
- new_frame.rsv1,
- new_frame.rsv2,
- new_frame.rsv3,
- )
-
- def write(
- self,
- write: Callable[[bytes], Any],
- *,
- mask: bool,
- extensions: Optional[Sequence[extensions.Extension]] = None,
- ) -> None:
- """
- Write a WebSocket frame.
-
- Args:
- frame: Frame to write.
- write: Function that writes bytes.
- mask: Whether the frame should be masked i.e. whether the write
- happens on the client side.
- extensions: List of extensions, applied in order.
-
- Raises:
- ProtocolError: If the frame contains incorrect values.
-
- """
- # The frame is written in a single call to write in order to prevent
- # TCP fragmentation. See #68 for details. This also makes it safe to
- # send frames concurrently from multiple coroutines.
- write(self.new_frame.serialize(mask=mask, extensions=extensions))
-
-
-# Backwards compatibility with previously documented public APIs
-from ..frames import ( # noqa: E402, F401, I001
- Close,
- prepare_ctrl as encode_data,
- prepare_data,
-)
-
-
-def parse_close(data: bytes) -> Tuple[int, str]:
- """
- Parse the payload from a close frame.
-
- Returns:
- Close code and reason.
-
- Raises:
- ProtocolError: If data is ill-formed.
- UnicodeDecodeError: If the reason isn't valid UTF-8.
-
- """
- close = Close.parse(data)
- return close.code, close.reason
-
-
-def serialize_close(code: int, reason: str) -> bytes:
- """
- Serialize the payload for a close frame.
-
- """
- return Close(code, reason).serialize()
diff --git a/venv/lib/python3.11/site-packages/websockets/legacy/handshake.py b/venv/lib/python3.11/site-packages/websockets/legacy/handshake.py
deleted file mode 100644
index ad8faf0..0000000
--- a/venv/lib/python3.11/site-packages/websockets/legacy/handshake.py
+++ /dev/null
@@ -1,165 +0,0 @@
-from __future__ import annotations
-
-import base64
-import binascii
-from typing import List
-
-from ..datastructures import Headers, MultipleValuesError
-from ..exceptions import InvalidHeader, InvalidHeaderValue, InvalidUpgrade
-from ..headers import parse_connection, parse_upgrade
-from ..typing import ConnectionOption, UpgradeProtocol
-from ..utils import accept_key as accept, generate_key
-
-
-__all__ = ["build_request", "check_request", "build_response", "check_response"]
-
-
def build_request(headers: Headers) -> str:
    """
    Build a handshake request to send to the server.

    The request headers passed in argument are updated in place.

    Args:
        headers: Handshake request headers.

    Returns:
        str: ``key`` that must be passed to :func:`check_response`.

    """
    key = generate_key()
    # Header order is preserved by Headers; keep the conventional order.
    handshake_headers = [
        ("Upgrade", "websocket"),
        ("Connection", "Upgrade"),
        ("Sec-WebSocket-Key", key),
        ("Sec-WebSocket-Version", "13"),
    ]
    for name, value in handshake_headers:
        headers[name] = value
    return key
-
-
def check_request(headers: Headers) -> str:
    """
    Check a handshake request received from the client.

    This function doesn't verify that the request is an HTTP/1.1 or higher
    GET request and doesn't perform ``Host`` and ``Origin`` checks. Those
    controls are usually performed earlier in the HTTP request handling code
    and are the responsibility of the caller.

    Args:
        headers: Handshake request headers.

    Returns:
        str: ``key`` that must be passed to :func:`build_response`.

    Raises:
        InvalidHandshake: If the handshake request is invalid.
            Then, the server must return a 400 Bad Request error.

    """
    # Flatten all Connection headers into a single list of options.
    connection_options: List[ConnectionOption] = sum(
        [parse_connection(value) for value in headers.get_all("Connection")], []
    )

    if not any(option.lower() == "upgrade" for option in connection_options):
        raise InvalidUpgrade("Connection", ", ".join(connection_options))

    upgrade_protocols: List[UpgradeProtocol] = sum(
        [parse_upgrade(value) for value in headers.get_all("Upgrade")], []
    )

    # For compatibility with non-strict implementations, ignore case when
    # checking the Upgrade header. The RFC always uses "websocket", except
    # in section 11.2. (IANA registration) where it uses "WebSocket".
    if not (
        len(upgrade_protocols) == 1 and upgrade_protocols[0].lower() == "websocket"
    ):
        raise InvalidUpgrade("Upgrade", ", ".join(upgrade_protocols))

    try:
        s_w_key = headers["Sec-WebSocket-Key"]
    except KeyError as exc:
        raise InvalidHeader("Sec-WebSocket-Key") from exc
    except MultipleValuesError as exc:
        raise InvalidHeader(
            "Sec-WebSocket-Key", "more than one Sec-WebSocket-Key header found"
        ) from exc

    # The key must be 16 random bytes, base64-encoded.
    try:
        raw_key = base64.b64decode(s_w_key.encode(), validate=True)
    except binascii.Error as exc:
        raise InvalidHeaderValue("Sec-WebSocket-Key", s_w_key) from exc
    if len(raw_key) != 16:
        raise InvalidHeaderValue("Sec-WebSocket-Key", s_w_key)

    try:
        s_w_version = headers["Sec-WebSocket-Version"]
    except KeyError as exc:
        raise InvalidHeader("Sec-WebSocket-Version") from exc
    except MultipleValuesError as exc:
        raise InvalidHeader(
            "Sec-WebSocket-Version", "more than one Sec-WebSocket-Version header found"
        ) from exc

    if s_w_version != "13":
        raise InvalidHeaderValue("Sec-WebSocket-Version", s_w_version)

    return s_w_key
-
-
def build_response(headers: Headers, key: str) -> None:
    """
    Build a handshake response to send to the client.

    The response headers passed in argument are updated in place.

    Args:
        headers: Handshake response headers.
        key: Returned by :func:`check_request`.

    """
    handshake_headers = [
        ("Upgrade", "websocket"),
        ("Connection", "Upgrade"),
        ("Sec-WebSocket-Accept", accept(key)),
    ]
    for name, value in handshake_headers:
        headers[name] = value
-
-
def check_response(headers: Headers, key: str) -> None:
    """
    Check a handshake response received from the server.

    This function doesn't verify that the response is an HTTP/1.1 or higher
    response with a 101 status code. These controls are the responsibility of
    the caller.

    Args:
        headers: Handshake response headers.
        key: Returned by :func:`build_request`.

    Raises:
        InvalidHandshake: If the handshake response is invalid.

    """
    connection: List[ConnectionOption] = sum(
        [parse_connection(value) for value in headers.get_all("Connection")], []
    )

    if not any(value.lower() == "upgrade" for value in connection):
        # Fix: join with ", " for consistency with check_request and with the
        # Upgrade error below; the previous " ".join produced an error message
        # formatted differently from every other InvalidUpgrade message.
        raise InvalidUpgrade("Connection", ", ".join(connection))

    upgrade: List[UpgradeProtocol] = sum(
        [parse_upgrade(value) for value in headers.get_all("Upgrade")], []
    )

    # For compatibility with non-strict implementations, ignore case when
    # checking the Upgrade header. The RFC always uses "websocket", except
    # in section 11.2. (IANA registration) where it uses "WebSocket".
    if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"):
        raise InvalidUpgrade("Upgrade", ", ".join(upgrade))

    try:
        s_w_accept = headers["Sec-WebSocket-Accept"]
    except KeyError as exc:
        raise InvalidHeader("Sec-WebSocket-Accept") from exc
    except MultipleValuesError as exc:
        raise InvalidHeader(
            "Sec-WebSocket-Accept", "more than one Sec-WebSocket-Accept header found"
        ) from exc

    # The accept value is a deterministic digest of the key; any mismatch
    # means the server didn't perform a WebSocket handshake for this request.
    if s_w_accept != accept(key):
        raise InvalidHeaderValue("Sec-WebSocket-Accept", s_w_accept)
diff --git a/venv/lib/python3.11/site-packages/websockets/legacy/http.py b/venv/lib/python3.11/site-packages/websockets/legacy/http.py
deleted file mode 100644
index 2ac7f70..0000000
--- a/venv/lib/python3.11/site-packages/websockets/legacy/http.py
+++ /dev/null
@@ -1,201 +0,0 @@
-from __future__ import annotations
-
-import asyncio
-import re
-from typing import Tuple
-
-from ..datastructures import Headers
-from ..exceptions import SecurityError
-
-
__all__ = ["read_request", "read_response"]

# Security limits enforced while parsing HTTP: at most MAX_HEADERS header
# lines per message (see read_headers) and at most MAX_LINE bytes per line,
# CRLF included (see read_line).
MAX_HEADERS = 128
MAX_LINE = 8192
-
-
def d(value: bytes) -> str:
    """
    Decode a bytestring for interpolating into an error message.

    Bytes that aren't valid UTF-8 are rendered as ``\\xNN`` escapes instead
    of raising :exc:`UnicodeDecodeError`.

    """
    return value.decode("utf-8", "backslashreplace")
-
-
# See https://www.rfc-editor.org/rfc/rfc7230.html#appendix-B.

# Regex for validating header names (RFC 7230 "token").

_token_re = re.compile(rb"[-!#$%&\'*+.^_`|~0-9a-zA-Z]+")

# Regex for validating header values.

# We don't attempt to support obsolete line folding.

# Include HTAB (\x09), SP (\x20), VCHAR (\x21-\x7e), obs-text (\x80-\xff).

# The ABNF is complicated because it attempts to express that optional
# whitespace is ignored. We strip whitespace and don't revalidate that.

# See also https://www.rfc-editor.org/errata_search.php?rfc=7230&eid=4189

_value_re = re.compile(rb"[\x09\x20-\x7e\x80-\xff]*")
-
-
async def read_request(stream: asyncio.StreamReader) -> Tuple[str, Headers]:
    """
    Read an HTTP/1.1 GET request and return ``(path, headers)``.

    ``path`` isn't URL-decoded or validated in any way.

    ``path`` and ``headers`` are expected to contain only ASCII characters;
    other characters are represented with surrogate escapes.

    The request body, if any, isn't consumed, because WebSocket handshake
    requests don't have one; it may be read from ``stream`` after this
    coroutine returns.

    Args:
        stream: Input to read the request from.

    Raises:
        EOFError: If the connection is closed without a full HTTP request.
        SecurityError: If the request exceeds a security limit.
        ValueError: If the request isn't well formatted.

    """
    # https://www.rfc-editor.org/rfc/rfc7230.html#section-3.1.1

    # Parsing is simple because fixed values are expected for method and
    # version and because path isn't checked. Since WebSocket software tends
    # to implement HTTP/1.1 strictly, there's little need for lenient parsing.

    try:
        request_line = await read_line(stream)
    except EOFError as exc:
        raise EOFError("connection closed while reading HTTP request line") from exc

    parts = request_line.split(b" ", 2)
    if len(parts) != 3:
        raise ValueError(f"invalid HTTP request line: {d(request_line)}")
    method, raw_path, version = parts

    if method != b"GET":
        raise ValueError(f"unsupported HTTP method: {d(method)}")
    if version != b"HTTP/1.1":
        raise ValueError(f"unsupported HTTP version: {d(version)}")

    path = raw_path.decode("ascii", "surrogateescape")
    headers = await read_headers(stream)
    return path, headers
-
-
async def read_response(stream: asyncio.StreamReader) -> Tuple[int, str, Headers]:
    """
    Read an HTTP/1.1 response and return ``(status_code, reason, headers)``.

    ``reason`` and ``headers`` are expected to contain only ASCII characters;
    other characters are represented with surrogate escapes.

    The response body, if any, isn't consumed, because WebSocket handshake
    responses don't have one; it may be read from ``stream`` after this
    coroutine returns.

    Args:
        stream: Input to read the response from.

    Raises:
        EOFError: If the connection is closed without a full HTTP response.
        SecurityError: If the response exceeds a security limit.
        ValueError: If the response isn't well formatted.

    """
    # https://www.rfc-editor.org/rfc/rfc7230.html#section-3.1.2

    # Parsing is simple because a fixed value is expected for the version,
    # the status code is a 3-digit number, and the reason can be ignored.

    try:
        status_line = await read_line(stream)
    except EOFError as exc:
        raise EOFError("connection closed while reading HTTP status line") from exc

    parts = status_line.split(b" ", 2)
    if len(parts) != 3:
        raise ValueError(f"invalid HTTP status line: {d(status_line)}")
    version, raw_status_code, raw_reason = parts

    if version != b"HTTP/1.1":
        raise ValueError(f"unsupported HTTP version: {d(version)}")
    try:
        status_code = int(raw_status_code)
    except ValueError:  # invalid literal for int() with base 10
        raise ValueError(f"invalid HTTP status code: {d(raw_status_code)}") from None
    if not 100 <= status_code < 1000:
        raise ValueError(f"unsupported HTTP status code: {d(raw_status_code)}")
    if not _value_re.fullmatch(raw_reason):
        raise ValueError(f"invalid HTTP reason phrase: {d(raw_reason)}")
    reason = raw_reason.decode()

    headers = await read_headers(stream)
    return status_code, reason, headers
-
-
async def read_headers(stream: asyncio.StreamReader) -> Headers:
    """
    Read HTTP headers from ``stream`` until the blank terminator line.

    Non-ASCII characters are represented with surrogate escapes.

    Raises:
        EOFError: If the connection is closed mid-headers.
        SecurityError: If more than ``MAX_HEADERS`` header lines are read.
        ValueError: If a header line is malformed.

    """
    # https://www.rfc-editor.org/rfc/rfc7230.html#section-3.2

    # We don't attempt to support obsolete line folding.

    headers = Headers()
    for _ in range(MAX_HEADERS + 1):
        try:
            line = await read_line(stream)
        except EOFError as exc:
            raise EOFError("connection closed while reading HTTP headers") from exc
        # A blank line terminates the header section.
        if not line:
            break

        name_bytes, separator, value_bytes = line.partition(b":")
        if not separator:
            raise ValueError(f"invalid HTTP header line: {d(line)}")
        if not _token_re.fullmatch(name_bytes):
            raise ValueError(f"invalid HTTP header name: {d(name_bytes)}")
        value_bytes = value_bytes.strip(b" \t")
        if not _value_re.fullmatch(value_bytes):
            raise ValueError(f"invalid HTTP header value: {d(value_bytes)}")

        # The name is guaranteed to be ASCII at this point.
        name = name_bytes.decode("ascii")
        value = value_bytes.decode("ascii", "surrogateescape")
        headers[name] = value

    else:
        raise SecurityError("too many HTTP headers")

    return headers
-
-
async def read_line(stream: asyncio.StreamReader) -> bytes:
    """
    Read a single CRLF-terminated line from ``stream``.

    The trailing CRLF is stripped from the return value.

    Raises:
        SecurityError: If the line exceeds ``MAX_LINE`` bytes.
        EOFError: If the line isn't terminated by CRLF.

    """
    # Security: this is bounded by the StreamReader's limit (default = 32 KiB).
    raw = await stream.readline()
    # Security: this guarantees header values are small (hard-coded = 8 KiB)
    if len(raw) > MAX_LINE:
        raise SecurityError("line too long")
    # Not mandatory but safe - https://www.rfc-editor.org/rfc/rfc7230.html#section-3.5
    if not raw.endswith(b"\r\n"):
        raise EOFError("line without CRLF")
    return raw[:-2]
diff --git a/venv/lib/python3.11/site-packages/websockets/legacy/protocol.py b/venv/lib/python3.11/site-packages/websockets/legacy/protocol.py
deleted file mode 100644
index 19cee0e..0000000
--- a/venv/lib/python3.11/site-packages/websockets/legacy/protocol.py
+++ /dev/null
@@ -1,1645 +0,0 @@
-from __future__ import annotations
-
-import asyncio
-import codecs
-import collections
-import logging
-import random
-import ssl
-import struct
-import sys
-import time
-import uuid
-import warnings
-from typing import (
- Any,
- AsyncIterable,
- AsyncIterator,
- Awaitable,
- Callable,
- Deque,
- Dict,
- Iterable,
- List,
- Mapping,
- Optional,
- Tuple,
- Union,
- cast,
-)
-
-from ..datastructures import Headers
-from ..exceptions import (
- ConnectionClosed,
- ConnectionClosedError,
- ConnectionClosedOK,
- InvalidState,
- PayloadTooBig,
- ProtocolError,
-)
-from ..extensions import Extension
-from ..frames import (
- OK_CLOSE_CODES,
- OP_BINARY,
- OP_CLOSE,
- OP_CONT,
- OP_PING,
- OP_PONG,
- OP_TEXT,
- Close,
- CloseCode,
- Opcode,
- prepare_ctrl,
- prepare_data,
-)
-from ..protocol import State
-from ..typing import Data, LoggerLike, Subprotocol
-from .compatibility import asyncio_timeout
-from .framing import Frame
-
-
-__all__ = ["WebSocketCommonProtocol", "broadcast"]
-
-
-# In order to ensure consistency, the code always checks the current value of
-# WebSocketCommonProtocol.state before assigning a new value and never yields
-# between the check and the assignment.
-
-
-class WebSocketCommonProtocol(asyncio.Protocol):
- """
- WebSocket connection.
-
- :class:`WebSocketCommonProtocol` provides APIs shared between WebSocket
- servers and clients. You shouldn't use it directly. Instead, use
- :class:`~websockets.client.WebSocketClientProtocol` or
- :class:`~websockets.server.WebSocketServerProtocol`.
-
- This documentation focuses on low-level details that aren't covered in the
- documentation of :class:`~websockets.client.WebSocketClientProtocol` and
- :class:`~websockets.server.WebSocketServerProtocol` for the sake of
- simplicity.
-
- Once the connection is open, a Ping_ frame is sent every ``ping_interval``
- seconds. This serves as a keepalive. It helps keeping the connection open,
- especially in the presence of proxies with short timeouts on inactive
- connections. Set ``ping_interval`` to :obj:`None` to disable this behavior.
-
- .. _Ping: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.5.2
-
- If the corresponding Pong_ frame isn't received within ``ping_timeout``
- seconds, the connection is considered unusable and is closed with code 1011.
- This ensures that the remote endpoint remains responsive. Set
- ``ping_timeout`` to :obj:`None` to disable this behavior.
-
- .. _Pong: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.5.3
-
- See the discussion of :doc:`timeouts <../../topics/timeouts>` for details.
-
- The ``close_timeout`` parameter defines a maximum wait time for completing
- the closing handshake and terminating the TCP connection. For legacy
- reasons, :meth:`close` completes in at most ``5 * close_timeout`` seconds
- for clients and ``4 * close_timeout`` for servers.
-
- ``close_timeout`` is a parameter of the protocol because websockets usually
- calls :meth:`close` implicitly upon exit:
-
- * on the client side, when using :func:`~websockets.client.connect` as a
- context manager;
- * on the server side, when the connection handler terminates.
-
- To apply a timeout to any other API, wrap it in :func:`~asyncio.timeout` or
- :func:`~asyncio.wait_for`.
-
- The ``max_size`` parameter enforces the maximum size for incoming messages
- in bytes. The default value is 1 MiB. If a larger message is received,
- :meth:`recv` will raise :exc:`~websockets.exceptions.ConnectionClosedError`
- and the connection will be closed with code 1009.
-
- The ``max_queue`` parameter sets the maximum length of the queue that
- holds incoming messages. The default value is ``32``. Messages are added
- to an in-memory queue when they're received; then :meth:`recv` pops from
- that queue. In order to prevent excessive memory consumption when
- messages are received faster than they can be processed, the queue must
- be bounded. If the queue fills up, the protocol stops processing incoming
- data until :meth:`recv` is called. In this situation, various receive
- buffers (at least in :mod:`asyncio` and in the OS) will fill up, then the
- TCP receive window will shrink, slowing down transmission to avoid packet
- loss.
-
- Since Python can use up to 4 bytes of memory to represent a single
- character, each connection may use up to ``4 * max_size * max_queue``
- bytes of memory to store incoming messages. By default, this is 128 MiB.
- You may want to lower the limits, depending on your application's
- requirements.
-
- The ``read_limit`` argument sets the high-water limit of the buffer for
- incoming bytes. The low-water limit is half the high-water limit. The
- default value is 64 KiB, half of asyncio's default (based on the current
- implementation of :class:`~asyncio.StreamReader`).
-
- The ``write_limit`` argument sets the high-water limit of the buffer for
- outgoing bytes. The low-water limit is a quarter of the high-water limit.
- The default value is 64 KiB, equal to asyncio's default (based on the
- current implementation of ``FlowControlMixin``).
-
- See the discussion of :doc:`memory usage <../../topics/memory>` for details.
-
- Args:
- logger: Logger for this server.
- It defaults to ``logging.getLogger("websockets.protocol")``.
- See the :doc:`logging guide <../../topics/logging>` for details.
- ping_interval: Delay between keepalive pings in seconds.
- :obj:`None` disables keepalive pings.
- ping_timeout: Timeout for keepalive pings in seconds.
- :obj:`None` disables timeouts.
- close_timeout: Timeout for closing the connection in seconds.
- For legacy reasons, the actual timeout is 4 or 5 times larger.
- max_size: Maximum size of incoming messages in bytes.
- :obj:`None` disables the limit.
- max_queue: Maximum number of incoming messages in receive buffer.
- :obj:`None` disables the limit.
- read_limit: High-water mark of read buffer in bytes.
- write_limit: High-water mark of write buffer in bytes.
-
- """
-
- # There are only two differences between the client-side and server-side
- # behavior: masking the payload and closing the underlying TCP connection.
- # Set is_client = True/False and side = "client"/"server" to pick a side.
- is_client: bool
- side: str = "undefined"
-
    def __init__(
        self,
        *,
        logger: Optional[LoggerLike] = None,
        ping_interval: Optional[float] = 20,
        ping_timeout: Optional[float] = 20,
        close_timeout: Optional[float] = None,
        max_size: Optional[int] = 2**20,
        max_queue: Optional[int] = 2**5,
        read_limit: int = 2**16,
        write_limit: int = 2**16,
        # The following arguments are kept only for backwards compatibility.
        host: Optional[str] = None,
        port: Optional[int] = None,
        secure: Optional[bool] = None,
        legacy_recv: bool = False,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        timeout: Optional[float] = None,
    ) -> None:
        """
        Initialize connection state; see the class docstring for parameters.

        Deprecated parameters (``host``, ``port``, ``secure``, ``legacy_recv``,
        ``loop``, ``timeout``) are accepted with a warning and mapped onto
        their modern equivalents.
        """
        if legacy_recv:  # pragma: no cover
            warnings.warn("legacy_recv is deprecated", DeprecationWarning)

        # Backwards compatibility: close_timeout used to be called timeout.
        if timeout is None:
            timeout = 10
        else:
            warnings.warn("rename timeout to close_timeout", DeprecationWarning)
        # If both are specified, timeout is ignored.
        if close_timeout is None:
            close_timeout = timeout

        # Backwards compatibility: the loop parameter used to be supported.
        if loop is None:
            loop = asyncio.get_event_loop()
        else:
            warnings.warn("remove loop argument", DeprecationWarning)

        self.ping_interval = ping_interval
        self.ping_timeout = ping_timeout
        self.close_timeout = close_timeout
        self.max_size = max_size
        self.max_queue = max_queue
        self.read_limit = read_limit
        self.write_limit = write_limit

        # Unique identifier. For logs.
        self.id: uuid.UUID = uuid.uuid4()
        """Unique identifier of the connection. Useful in logs."""

        # Logger or LoggerAdapter for this connection.
        if logger is None:
            logger = logging.getLogger("websockets.protocol")
        self.logger: LoggerLike = logging.LoggerAdapter(logger, {"websocket": self})
        """Logger for this connection."""

        # Track if DEBUG is enabled. Shortcut logging calls if it isn't.
        # NOTE: checked on the underlying logger, not the adapter, so a level
        # change after __init__ won't be picked up.
        self.debug = logger.isEnabledFor(logging.DEBUG)

        self.loop = loop

        self._host = host
        self._port = port
        self._secure = secure
        self.legacy_recv = legacy_recv

        # Configure read buffer limits. The high-water limit is defined by
        # ``self.read_limit``. The ``limit`` argument controls the line length
        # limit and half the buffer limit of :class:`~asyncio.StreamReader`.
        # That's why it must be set to half of ``self.read_limit``.
        self.reader = asyncio.StreamReader(limit=read_limit // 2, loop=loop)

        # Copied from asyncio.FlowControlMixin
        self._paused = False
        self._drain_waiter: Optional[asyncio.Future[None]] = None

        self._drain_lock = asyncio.Lock()

        # This class implements the data transfer and closing handshake, which
        # are shared between the client-side and the server-side.
        # Subclasses implement the opening handshake and, on success, execute
        # :meth:`connection_open` to change the state to OPEN.
        self.state = State.CONNECTING
        if self.debug:
            self.logger.debug("= connection is CONNECTING")

        # HTTP protocol parameters.
        self.path: str
        """Path of the opening handshake request."""
        self.request_headers: Headers
        """Opening handshake request headers."""
        self.response_headers: Headers
        """Opening handshake response headers."""

        # WebSocket protocol parameters.
        self.extensions: List[Extension] = []
        self.subprotocol: Optional[Subprotocol] = None
        """Subprotocol, if one was negotiated."""

        # Close code and reason, set when a close frame is sent or received.
        self.close_rcvd: Optional[Close] = None
        self.close_sent: Optional[Close] = None
        self.close_rcvd_then_sent: Optional[bool] = None

        # Completed when the connection state becomes CLOSED. Translates the
        # :meth:`connection_lost` callback to a :class:`~asyncio.Future`
        # that can be awaited. (Other :class:`~asyncio.Protocol` callbacks are
        # translated by ``self.stream_reader``).
        self.connection_lost_waiter: asyncio.Future[None] = loop.create_future()

        # Queue of received messages.
        self.messages: Deque[Data] = collections.deque()
        self._pop_message_waiter: Optional[asyncio.Future[None]] = None
        self._put_message_waiter: Optional[asyncio.Future[None]] = None

        # Protect sending fragmented messages.
        self._fragmented_message_waiter: Optional[asyncio.Future[None]] = None

        # Mapping of ping IDs to pong waiters, in chronological order.
        self.pings: Dict[bytes, Tuple[asyncio.Future[float], float]] = {}

        self.latency: float = 0
        """
        Latency of the connection, in seconds.

        This value is updated after sending a ping frame and receiving a
        matching pong frame. Before the first ping, :attr:`latency` is ``0``.

        By default, websockets enables a :ref:`keepalive <keepalive>` mechanism
        that sends ping frames automatically at regular intervals. You can also
        send ping frames and measure latency with :meth:`ping`.
        """

        # Task running the data transfer; created by connection_open().
        self.transfer_data_task: asyncio.Task[None]

        # Exception that occurred during data transfer, if any.
        self.transfer_data_exc: Optional[BaseException] = None

        # Task sending keepalive pings; created by connection_open().
        self.keepalive_ping_task: asyncio.Task[None]

        # Task closing the TCP connection; created by connection_open().
        self.close_connection_task: asyncio.Task[None]
-
    # Copied from asyncio.FlowControlMixin
    async def _drain_helper(self) -> None:  # pragma: no cover
        """
        Wait until writing may resume after the transport paused it.

        Raises ConnectionResetError if the connection was lost. The waiter
        future is presumably resolved by the protocol's resume-writing
        callback — copied verbatim from asyncio.FlowControlMixin.
        """
        if self.connection_lost_waiter.done():
            raise ConnectionResetError("Connection lost")
        if not self._paused:
            return
        waiter = self._drain_waiter
        # Invariant inherited from asyncio: a previous waiter, if any, must
        # already be cancelled before a new one is installed.
        assert waiter is None or waiter.cancelled()
        waiter = self.loop.create_future()
        self._drain_waiter = waiter
        await waiter
-
    # Copied from asyncio.StreamWriter
    async def _drain(self) -> None:  # pragma: no cover
        """
        Flow-control helper: propagate reader errors, then wait for the
        write buffer to drain below the high-water mark.

        NOTE(review): accesses self.transport, which is set outside this
        block — presumably in connection_made(); confirm before reuse.
        """
        if self.reader is not None:
            exc = self.reader.exception()
            if exc is not None:
                raise exc
        if self.transport is not None:
            if self.transport.is_closing():
                # Yield to the event loop so connection_lost() may be
                # called. Without this, _drain_helper() would return
                # immediately, and code that calls
                # write(...); yield from drain()
                # in a loop would never call connection_lost(), so it
                # would not see an error when the socket is closed.
                await asyncio.sleep(0)
        await self._drain_helper()
-
    def connection_open(self) -> None:
        """
        Callback when the WebSocket opening handshake completes.

        Enter the OPEN state and start the data transfer phase.

        Must be called exactly once, while the state is still CONNECTING.

        """
        # 4.1. The WebSocket Connection is Established.
        assert self.state is State.CONNECTING
        self.state = State.OPEN
        if self.debug:
            self.logger.debug("= connection is OPEN")
        # Start the task that receives incoming WebSocket messages.
        self.transfer_data_task = self.loop.create_task(self.transfer_data())
        # Start the task that sends pings at regular intervals.
        self.keepalive_ping_task = self.loop.create_task(self.keepalive_ping())
        # Start the task that eventually closes the TCP connection.
        self.close_connection_task = self.loop.create_task(self.close_connection())
-
- @property
- def host(self) -> Optional[str]:
- alternative = "remote_address" if self.is_client else "local_address"
- warnings.warn(f"use {alternative}[0] instead of host", DeprecationWarning)
- return self._host
-
- @property
- def port(self) -> Optional[int]:
- alternative = "remote_address" if self.is_client else "local_address"
- warnings.warn(f"use {alternative}[1] instead of port", DeprecationWarning)
- return self._port
-
    @property
    def secure(self) -> Optional[bool]:
        # Deprecated accessor kept for backwards compatibility; always warns.
        warnings.warn("don't use secure", DeprecationWarning)
        return self._secure
-
- # Public API
-
- @property
- def local_address(self) -> Any:
- """
- Local address of the connection.
-
- For IPv4 connections, this is a ``(host, port)`` tuple.
-
- The format of the address depends on the address family;
- see :meth:`~socket.socket.getsockname`.
-
- :obj:`None` if the TCP connection isn't established yet.
-
- """
- try:
- transport = self.transport
- except AttributeError:
- return None
- else:
- return transport.get_extra_info("sockname")
-
- @property
- def remote_address(self) -> Any:
- """
- Remote address of the connection.
-
- For IPv4 connections, this is a ``(host, port)`` tuple.
-
- The format of the address depends on the address family;
- see :meth:`~socket.socket.getpeername`.
-
- :obj:`None` if the TCP connection isn't established yet.
-
- """
- try:
- transport = self.transport
- except AttributeError:
- return None
- else:
- return transport.get_extra_info("peername")
-
- @property
- def open(self) -> bool:
- """
- :obj:`True` when the connection is open; :obj:`False` otherwise.
-
- This attribute may be used to detect disconnections. However, this
- approach is discouraged per the EAFP_ principle. Instead, you should
- handle :exc:`~websockets.exceptions.ConnectionClosed` exceptions.
-
- .. _EAFP: https://docs.python.org/3/glossary.html#term-eafp
-
- """
- return self.state is State.OPEN and not self.transfer_data_task.done()
-
    @property
    def closed(self) -> bool:
        """
        :obj:`True` when the connection is closed; :obj:`False` otherwise.

        Be aware that both :attr:`open` and :attr:`closed` are :obj:`False`
        during the opening and closing sequences.

        """
        return self.state is State.CLOSED
-
- @property
- def close_code(self) -> Optional[int]:
- """
- WebSocket close code, defined in `section 7.1.5 of RFC 6455`_.
-
- .. _section 7.1.5 of RFC 6455:
- https://www.rfc-editor.org/rfc/rfc6455.html#section-7.1.5
-
- :obj:`None` if the connection isn't closed yet.
-
- """
- if self.state is not State.CLOSED:
- return None
- elif self.close_rcvd is None:
- return CloseCode.ABNORMAL_CLOSURE
- else:
- return self.close_rcvd.code
-
- @property
- def close_reason(self) -> Optional[str]:
- """
- WebSocket close reason, defined in `section 7.1.6 of RFC 6455`_.
-
- .. _section 7.1.6 of RFC 6455:
- https://www.rfc-editor.org/rfc/rfc6455.html#section-7.1.6
-
- :obj:`None` if the connection isn't closed yet.
-
- """
- if self.state is not State.CLOSED:
- return None
- elif self.close_rcvd is None:
- return ""
- else:
- return self.close_rcvd.reason
-
    async def __aiter__(self) -> AsyncIterator[Data]:
        """
        Iterate on incoming messages.

        The iterator exits normally when the connection is closed with the close
        code 1000 (OK) or 1001 (going away) or without a close code.

        It raises a :exc:`~websockets.exceptions.ConnectionClosedError`
        exception when the connection is closed with any other code.

        """
        try:
            while True:
                yield await self.recv()
        except ConnectionClosedOK:
            # Normal closure ends iteration silently; ConnectionClosedError
            # propagates to the caller.
            return
-
    async def recv(self) -> Data:
        """
        Receive the next message.

        When the connection is closed, :meth:`recv` raises
        :exc:`~websockets.exceptions.ConnectionClosed`. Specifically, it raises
        :exc:`~websockets.exceptions.ConnectionClosedOK` after a normal
        connection closure and
        :exc:`~websockets.exceptions.ConnectionClosedError` after a protocol
        error or a network failure. This is how you detect the end of the
        message stream.

        Canceling :meth:`recv` is safe. There's no risk of losing the next
        message. The next invocation of :meth:`recv` will return it.

        This makes it possible to enforce a timeout by wrapping :meth:`recv` in
        :func:`~asyncio.timeout` or :func:`~asyncio.wait_for`.

        Returns:
            Data: A string (:class:`str`) for a Text_ frame. A bytestring
            (:class:`bytes`) for a Binary_ frame.

        .. _Text: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
        .. _Binary: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6

        Raises:
            ConnectionClosed: When the connection is closed.
            RuntimeError: If two coroutines call :meth:`recv` concurrently.

        """
        # Single-consumer design: _pop_message_waiter doubles as the lock
        # that detects concurrent recv() calls.
        if self._pop_message_waiter is not None:
            raise RuntimeError(
                "cannot call recv while another coroutine "
                "is already waiting for the next message"
            )

        # Don't await self.ensure_open() here:
        # - messages could be available in the queue even if the connection
        #   is closed;
        # - messages could be received before the closing frame even if the
        #   connection is closing.

        # Wait until there's a message in the queue (if necessary) or the
        # connection is closed.
        while len(self.messages) <= 0:
            pop_message_waiter: asyncio.Future[None] = self.loop.create_future()
            self._pop_message_waiter = pop_message_waiter
            try:
                # If asyncio.wait() is canceled, it doesn't cancel
                # pop_message_waiter and self.transfer_data_task.
                await asyncio.wait(
                    [pop_message_waiter, self.transfer_data_task],
                    return_when=asyncio.FIRST_COMPLETED,
                )
            finally:
                # Always release the "lock", even on cancellation, so the
                # next recv() call can proceed.
                self._pop_message_waiter = None

            # If asyncio.wait(...) exited because self.transfer_data_task
            # completed before receiving a new message, raise a suitable
            # exception (or return None if legacy_recv is enabled).
            if not pop_message_waiter.done():
                if self.legacy_recv:
                    return None  # type: ignore
                else:
                    # Wait until the connection is closed to raise
                    # ConnectionClosed with the correct code and reason.
                    await self.ensure_open()

        # Pop a message from the queue.
        message = self.messages.popleft()

        # Notify transfer_data().
        if self._put_message_waiter is not None:
            self._put_message_waiter.set_result(None)
            self._put_message_waiter = None

        return message
-
- async def send(
- self,
- message: Union[Data, Iterable[Data], AsyncIterable[Data]],
- ) -> None:
- """
- Send a message.
-
- A string (:class:`str`) is sent as a Text_ frame. A bytestring or
- bytes-like object (:class:`bytes`, :class:`bytearray`, or
- :class:`memoryview`) is sent as a Binary_ frame.
-
- .. _Text: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
- .. _Binary: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
-
- :meth:`send` also accepts an iterable or an asynchronous iterable of
- strings, bytestrings, or bytes-like objects to enable fragmentation_.
- Each item is treated as a message fragment and sent in its own frame.
- All items must be of the same type, or else :meth:`send` will raise a
- :exc:`TypeError` and the connection will be closed.
-
- .. _fragmentation: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.4
-
- :meth:`send` rejects dict-like objects because this is often an error.
- (If you want to send the keys of a dict-like object as fragments, call
- its :meth:`~dict.keys` method and pass the result to :meth:`send`.)
-
- Canceling :meth:`send` is discouraged. Instead, you should close the
- connection with :meth:`close`. Indeed, there are only two situations
- where :meth:`send` may yield control to the event loop and then get
- canceled; in both cases, :meth:`close` has the same effect and is
- more clear:
-
- 1. The write buffer is full. If you don't want to wait until enough
- data is sent, your only alternative is to close the connection.
- :meth:`close` will likely time out then abort the TCP connection.
- 2. ``message`` is an asynchronous iterator that yields control.
- Stopping in the middle of a fragmented message will cause a
- protocol error and the connection will be closed.
-
- When the connection is closed, :meth:`send` raises
- :exc:`~websockets.exceptions.ConnectionClosed`. Specifically, it
- raises :exc:`~websockets.exceptions.ConnectionClosedOK` after a normal
- connection closure and
- :exc:`~websockets.exceptions.ConnectionClosedError` after a protocol
- error or a network failure.
-
- Args:
- message (Union[Data, Iterable[Data], AsyncIterable[Data]): message
- to send.
-
- Raises:
- ConnectionClosed: When the connection is closed.
- TypeError: If ``message`` doesn't have a supported type.
-
- """
- await self.ensure_open()
-
- # While sending a fragmented message, prevent sending other messages
- # until all fragments are sent.
- while self._fragmented_message_waiter is not None:
- await asyncio.shield(self._fragmented_message_waiter)
-
- # Unfragmented message -- this case must be handled first because
- # strings and bytes-like objects are iterable.
-
- if isinstance(message, (str, bytes, bytearray, memoryview)):
- opcode, data = prepare_data(message)
- await self.write_frame(True, opcode, data)
-
- # Catch a common mistake -- passing a dict to send().
-
- elif isinstance(message, Mapping):
- raise TypeError("data is a dict-like object")
-
- # Fragmented message -- regular iterator.
-
- elif isinstance(message, Iterable):
- # Work around https://github.com/python/mypy/issues/6227
- message = cast(Iterable[Data], message)
-
- iter_message = iter(message)
- try:
- fragment = next(iter_message)
- except StopIteration:
- return
- opcode, data = prepare_data(fragment)
-
- self._fragmented_message_waiter = asyncio.Future()
- try:
- # First fragment.
- await self.write_frame(False, opcode, data)
-
- # Other fragments.
- for fragment in iter_message:
- confirm_opcode, data = prepare_data(fragment)
- if confirm_opcode != opcode:
- raise TypeError("data contains inconsistent types")
- await self.write_frame(False, OP_CONT, data)
-
- # Final fragment.
- await self.write_frame(True, OP_CONT, b"")
-
- except (Exception, asyncio.CancelledError):
- # We're half-way through a fragmented message and we can't
- # complete it. This makes the connection unusable.
- self.fail_connection(CloseCode.INTERNAL_ERROR)
- raise
-
- finally:
- self._fragmented_message_waiter.set_result(None)
- self._fragmented_message_waiter = None
-
- # Fragmented message -- asynchronous iterator
-
- elif isinstance(message, AsyncIterable):
- # Implement aiter_message = aiter(message) without aiter
- # Work around https://github.com/python/mypy/issues/5738
- aiter_message = cast(
- Callable[[AsyncIterable[Data]], AsyncIterator[Data]],
- type(message).__aiter__,
- )(message)
- try:
- # Implement fragment = anext(aiter_message) without anext
- # Work around https://github.com/python/mypy/issues/5738
- fragment = await cast(
- Callable[[AsyncIterator[Data]], Awaitable[Data]],
- type(aiter_message).__anext__,
- )(aiter_message)
- except StopAsyncIteration:
- return
- opcode, data = prepare_data(fragment)
-
- self._fragmented_message_waiter = asyncio.Future()
- try:
- # First fragment.
- await self.write_frame(False, opcode, data)
-
- # Other fragments.
- async for fragment in aiter_message:
- confirm_opcode, data = prepare_data(fragment)
- if confirm_opcode != opcode:
- raise TypeError("data contains inconsistent types")
- await self.write_frame(False, OP_CONT, data)
-
- # Final fragment.
- await self.write_frame(True, OP_CONT, b"")
-
- except (Exception, asyncio.CancelledError):
- # We're half-way through a fragmented message and we can't
- # complete it. This makes the connection unusable.
- self.fail_connection(CloseCode.INTERNAL_ERROR)
- raise
-
- finally:
- self._fragmented_message_waiter.set_result(None)
- self._fragmented_message_waiter = None
-
- else:
- raise TypeError("data must be str, bytes-like, or iterable")
-
- async def close(
- self,
- code: int = CloseCode.NORMAL_CLOSURE,
- reason: str = "",
- ) -> None:
- """
- Perform the closing handshake.
-
- :meth:`close` waits for the other end to complete the handshake and
- for the TCP connection to terminate. As a consequence, there's no need
- to await :meth:`wait_closed` after :meth:`close`.
-
- :meth:`close` is idempotent: it doesn't do anything once the
- connection is closed.
-
- Wrapping :func:`close` in :func:`~asyncio.create_task` is safe, given
- that errors during connection termination aren't particularly useful.
-
- Canceling :meth:`close` is discouraged. If it takes too long, you can
- set a shorter ``close_timeout``. If you don't want to wait, let the
- Python process exit, then the OS will take care of closing the TCP
- connection.
-
- Args:
- code: WebSocket close code.
- reason: WebSocket close reason.
-
- """
- try:
- async with asyncio_timeout(self.close_timeout):
- await self.write_close_frame(Close(code, reason))
- except asyncio.TimeoutError:
- # If the close frame cannot be sent because the send buffers
- # are full, the closing handshake won't complete anyway.
- # Fail the connection to shut down faster.
- self.fail_connection()
-
- # If no close frame is received within the timeout, asyncio_timeout()
- # cancels the data transfer task and raises TimeoutError.
-
- # If close() is called multiple times concurrently and one of these
- # calls hits the timeout, the data transfer task will be canceled.
- # Other calls will receive a CancelledError here.
-
- try:
- # If close() is canceled during the wait, self.transfer_data_task
- # is canceled before the timeout elapses.
- async with asyncio_timeout(self.close_timeout):
- await self.transfer_data_task
- except (asyncio.TimeoutError, asyncio.CancelledError):
- pass
-
- # Wait for the close connection task to close the TCP connection.
- await asyncio.shield(self.close_connection_task)
-
- async def wait_closed(self) -> None:
- """
- Wait until the connection is closed.
-
- This coroutine is identical to the :attr:`closed` attribute, except it
- can be awaited.
-
- This can make it easier to detect connection termination, regardless
- of its cause, in tasks that interact with the WebSocket connection.
-
- """
- await asyncio.shield(self.connection_lost_waiter)
-
- async def ping(self, data: Optional[Data] = None) -> Awaitable[None]:
- """
- Send a Ping_.
-
- .. _Ping: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.5.2
-
- A ping may serve as a keepalive, as a check that the remote endpoint
- received all messages up to this point, or to measure :attr:`latency`.
-
- Canceling :meth:`ping` is discouraged. If :meth:`ping` doesn't return
- immediately, it means the write buffer is full. If you don't want to
- wait, you should close the connection.
-
- Canceling the :class:`~asyncio.Future` returned by :meth:`ping` has no
- effect.
-
- Args:
- data (Optional[Data]): payload of the ping; a string will be
- encoded to UTF-8; or :obj:`None` to generate a payload
- containing four random bytes.
-
- Returns:
- ~asyncio.Future[float]: A future that will be completed when the
- corresponding pong is received. You can ignore it if you don't
- intend to wait. The result of the future is the latency of the
- connection in seconds.
-
- ::
-
- pong_waiter = await ws.ping()
- # only if you want to wait for the corresponding pong
- latency = await pong_waiter
-
- Raises:
- ConnectionClosed: When the connection is closed.
- RuntimeError: If another ping was sent with the same data and
- the corresponding pong wasn't received yet.
-
- """
- await self.ensure_open()
-
- if data is not None:
- data = prepare_ctrl(data)
-
- # Protect against duplicates if a payload is explicitly set.
- if data in self.pings:
- raise RuntimeError("already waiting for a pong with the same data")
-
- # Generate a unique random payload otherwise.
- while data is None or data in self.pings:
- data = struct.pack("!I", random.getrandbits(32))
-
- pong_waiter = self.loop.create_future()
- # Resolution of time.monotonic() may be too low on Windows.
- ping_timestamp = time.perf_counter()
- self.pings[data] = (pong_waiter, ping_timestamp)
-
- await self.write_frame(True, OP_PING, data)
-
- return asyncio.shield(pong_waiter)
-
- async def pong(self, data: Data = b"") -> None:
- """
- Send a Pong_.
-
- .. _Pong: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.5.3
-
- An unsolicited pong may serve as a unidirectional heartbeat.
-
- Canceling :meth:`pong` is discouraged. If :meth:`pong` doesn't return
- immediately, it means the write buffer is full. If you don't want to
- wait, you should close the connection.
-
- Args:
- data (Data): Payload of the pong. A string will be encoded to
- UTF-8.
-
- Raises:
- ConnectionClosed: When the connection is closed.
-
- """
- await self.ensure_open()
-
- data = prepare_ctrl(data)
-
- await self.write_frame(True, OP_PONG, data)
-
- # Private methods - no guarantees.
-
- def connection_closed_exc(self) -> ConnectionClosed:
- exc: ConnectionClosed
- if (
- self.close_rcvd is not None
- and self.close_rcvd.code in OK_CLOSE_CODES
- and self.close_sent is not None
- and self.close_sent.code in OK_CLOSE_CODES
- ):
- exc = ConnectionClosedOK(
- self.close_rcvd,
- self.close_sent,
- self.close_rcvd_then_sent,
- )
- else:
- exc = ConnectionClosedError(
- self.close_rcvd,
- self.close_sent,
- self.close_rcvd_then_sent,
- )
- # Chain to the exception that terminated data transfer, if any.
- exc.__cause__ = self.transfer_data_exc
- return exc
-
- async def ensure_open(self) -> None:
- """
- Check that the WebSocket connection is open.
-
- Raise :exc:`~websockets.exceptions.ConnectionClosed` if it isn't.
-
- """
- # Handle cases from most common to least common for performance.
- if self.state is State.OPEN:
- # If self.transfer_data_task exited without a closing handshake,
- # self.close_connection_task may be closing the connection, going
- # straight from OPEN to CLOSED.
- if self.transfer_data_task.done():
- await asyncio.shield(self.close_connection_task)
- raise self.connection_closed_exc()
- else:
- return
-
- if self.state is State.CLOSED:
- raise self.connection_closed_exc()
-
- if self.state is State.CLOSING:
- # If we started the closing handshake, wait for its completion to
- # get the proper close code and reason. self.close_connection_task
- # will complete within 4 or 5 * close_timeout after close(). The
- # CLOSING state also occurs when failing the connection. In that
- # case self.close_connection_task will complete even faster.
- await asyncio.shield(self.close_connection_task)
- raise self.connection_closed_exc()
-
- # Control may only reach this point in buggy third-party subclasses.
- assert self.state is State.CONNECTING
- raise InvalidState("WebSocket connection isn't established yet")
-
- async def transfer_data(self) -> None:
- """
- Read incoming messages and put them in a queue.
-
- This coroutine runs in a task until the closing handshake is started.
-
- """
- try:
- while True:
- message = await self.read_message()
-
- # Exit the loop when receiving a close frame.
- if message is None:
- break
-
- # Wait until there's room in the queue (if necessary).
- if self.max_queue is not None:
- while len(self.messages) >= self.max_queue:
- self._put_message_waiter = self.loop.create_future()
- try:
- await asyncio.shield(self._put_message_waiter)
- finally:
- self._put_message_waiter = None
-
- # Put the message in the queue.
- self.messages.append(message)
-
- # Notify recv().
- if self._pop_message_waiter is not None:
- self._pop_message_waiter.set_result(None)
- self._pop_message_waiter = None
-
- except asyncio.CancelledError as exc:
- self.transfer_data_exc = exc
- # If fail_connection() cancels this task, avoid logging the error
- # twice and failing the connection again.
- raise
-
- except ProtocolError as exc:
- self.transfer_data_exc = exc
- self.fail_connection(CloseCode.PROTOCOL_ERROR)
-
- except (ConnectionError, TimeoutError, EOFError, ssl.SSLError) as exc:
- # Reading data with self.reader.readexactly may raise:
- # - most subclasses of ConnectionError if the TCP connection
- # breaks, is reset, or is aborted;
- # - TimeoutError if the TCP connection times out;
- # - IncompleteReadError, a subclass of EOFError, if fewer
- # bytes are available than requested;
- # - ssl.SSLError if the other side infringes the TLS protocol.
- self.transfer_data_exc = exc
- self.fail_connection(CloseCode.ABNORMAL_CLOSURE)
-
- except UnicodeDecodeError as exc:
- self.transfer_data_exc = exc
- self.fail_connection(CloseCode.INVALID_DATA)
-
- except PayloadTooBig as exc:
- self.transfer_data_exc = exc
- self.fail_connection(CloseCode.MESSAGE_TOO_BIG)
-
- except Exception as exc:
- # This shouldn't happen often because exceptions expected under
- # regular circumstances are handled above. If it does, consider
- # catching and handling more exceptions.
- self.logger.error("data transfer failed", exc_info=True)
-
- self.transfer_data_exc = exc
- self.fail_connection(CloseCode.INTERNAL_ERROR)
-
- async def read_message(self) -> Optional[Data]:
- """
- Read a single message from the connection.
-
- Re-assemble data frames if the message is fragmented.
-
- Return :obj:`None` when the closing handshake is started.
-
- """
- frame = await self.read_data_frame(max_size=self.max_size)
-
- # A close frame was received.
- if frame is None:
- return None
-
- if frame.opcode == OP_TEXT:
- text = True
- elif frame.opcode == OP_BINARY:
- text = False
- else: # frame.opcode == OP_CONT
- raise ProtocolError("unexpected opcode")
-
- # Shortcut for the common case - no fragmentation
- if frame.fin:
- return frame.data.decode("utf-8") if text else frame.data
-
- # 5.4. Fragmentation
- fragments: List[Data] = []
- max_size = self.max_size
- if text:
- decoder_factory = codecs.getincrementaldecoder("utf-8")
- decoder = decoder_factory(errors="strict")
- if max_size is None:
-
- def append(frame: Frame) -> None:
- nonlocal fragments
- fragments.append(decoder.decode(frame.data, frame.fin))
-
- else:
-
- def append(frame: Frame) -> None:
- nonlocal fragments, max_size
- fragments.append(decoder.decode(frame.data, frame.fin))
- assert isinstance(max_size, int)
- max_size -= len(frame.data)
-
- else:
- if max_size is None:
-
- def append(frame: Frame) -> None:
- nonlocal fragments
- fragments.append(frame.data)
-
- else:
-
- def append(frame: Frame) -> None:
- nonlocal fragments, max_size
- fragments.append(frame.data)
- assert isinstance(max_size, int)
- max_size -= len(frame.data)
-
- append(frame)
-
- while not frame.fin:
- frame = await self.read_data_frame(max_size=max_size)
- if frame is None:
- raise ProtocolError("incomplete fragmented message")
- if frame.opcode != OP_CONT:
- raise ProtocolError("unexpected opcode")
- append(frame)
-
- return ("" if text else b"").join(fragments)
-
- async def read_data_frame(self, max_size: Optional[int]) -> Optional[Frame]:
- """
- Read a single data frame from the connection.
-
- Process control frames received before the next data frame.
-
- Return :obj:`None` if a close frame is encountered before any data frame.
-
- """
- # 6.2. Receiving Data
- while True:
- frame = await self.read_frame(max_size)
-
- # 5.5. Control Frames
- if frame.opcode == OP_CLOSE:
- # 7.1.5. The WebSocket Connection Close Code
- # 7.1.6. The WebSocket Connection Close Reason
- self.close_rcvd = Close.parse(frame.data)
- if self.close_sent is not None:
- self.close_rcvd_then_sent = False
- try:
- # Echo the original data instead of re-serializing it with
- # Close.serialize() because that fails when the close frame
- # is empty and Close.parse() synthesizes a 1005 close code.
- await self.write_close_frame(self.close_rcvd, frame.data)
- except ConnectionClosed:
- # Connection closed before we could echo the close frame.
- pass
- return None
-
- elif frame.opcode == OP_PING:
- # Answer pings, unless connection is CLOSING.
- if self.state is State.OPEN:
- try:
- await self.pong(frame.data)
- except ConnectionClosed:
- # Connection closed while draining write buffer.
- pass
-
- elif frame.opcode == OP_PONG:
- if frame.data in self.pings:
- pong_timestamp = time.perf_counter()
- # Sending a pong for only the most recent ping is legal.
- # Acknowledge all previous pings too in that case.
- ping_id = None
- ping_ids = []
- for ping_id, (pong_waiter, ping_timestamp) in self.pings.items():
- ping_ids.append(ping_id)
- if not pong_waiter.done():
- pong_waiter.set_result(pong_timestamp - ping_timestamp)
- if ping_id == frame.data:
- self.latency = pong_timestamp - ping_timestamp
- break
- else:
- raise AssertionError("solicited pong not found in pings")
- # Remove acknowledged pings from self.pings.
- for ping_id in ping_ids:
- del self.pings[ping_id]
-
- # 5.6. Data Frames
- else:
- return frame
-
- async def read_frame(self, max_size: Optional[int]) -> Frame:
- """
- Read a single frame from the connection.
-
- """
- frame = await Frame.read(
- self.reader.readexactly,
- mask=not self.is_client,
- max_size=max_size,
- extensions=self.extensions,
- )
- if self.debug:
- self.logger.debug("< %s", frame)
- return frame
-
- def write_frame_sync(self, fin: bool, opcode: int, data: bytes) -> None:
- frame = Frame(fin, Opcode(opcode), data)
- if self.debug:
- self.logger.debug("> %s", frame)
- frame.write(
- self.transport.write,
- mask=self.is_client,
- extensions=self.extensions,
- )
-
- async def drain(self) -> None:
- try:
- # drain() cannot be called concurrently by multiple coroutines:
- # http://bugs.python.org/issue29930. Remove this lock when no
- # version of Python where this bugs exists is supported anymore.
- async with self._drain_lock:
- # Handle flow control automatically.
- await self._drain()
- except ConnectionError:
- # Terminate the connection if the socket died.
- self.fail_connection()
- # Wait until the connection is closed to raise ConnectionClosed
- # with the correct code and reason.
- await self.ensure_open()
-
- async def write_frame(
- self, fin: bool, opcode: int, data: bytes, *, _state: int = State.OPEN
- ) -> None:
- # Defensive assertion for protocol compliance.
- if self.state is not _state: # pragma: no cover
- raise InvalidState(
- f"Cannot write to a WebSocket in the {self.state.name} state"
- )
- self.write_frame_sync(fin, opcode, data)
- await self.drain()
-
- async def write_close_frame(
- self, close: Close, data: Optional[bytes] = None
- ) -> None:
- """
- Write a close frame if and only if the connection state is OPEN.
-
- This dedicated coroutine must be used for writing close frames to
- ensure that at most one close frame is sent on a given connection.
-
- """
- # Test and set the connection state before sending the close frame to
- # avoid sending two frames in case of concurrent calls.
- if self.state is State.OPEN:
- # 7.1.3. The WebSocket Closing Handshake is Started
- self.state = State.CLOSING
- if self.debug:
- self.logger.debug("= connection is CLOSING")
-
- self.close_sent = close
- if self.close_rcvd is not None:
- self.close_rcvd_then_sent = True
- if data is None:
- data = close.serialize()
-
- # 7.1.2. Start the WebSocket Closing Handshake
- await self.write_frame(True, OP_CLOSE, data, _state=State.CLOSING)
-
- async def keepalive_ping(self) -> None:
- """
- Send a Ping frame and wait for a Pong frame at regular intervals.
-
- This coroutine exits when the connection terminates and one of the
- following happens:
-
- - :meth:`ping` raises :exc:`ConnectionClosed`, or
- - :meth:`close_connection` cancels :attr:`keepalive_ping_task`.
-
- """
- if self.ping_interval is None:
- return
-
- try:
- while True:
- await asyncio.sleep(self.ping_interval)
-
- # ping() raises CancelledError if the connection is closed,
- # when close_connection() cancels self.keepalive_ping_task.
-
- # ping() raises ConnectionClosed if the connection is lost,
- # when connection_lost() calls abort_pings().
-
- self.logger.debug("% sending keepalive ping")
- pong_waiter = await self.ping()
-
- if self.ping_timeout is not None:
- try:
- async with asyncio_timeout(self.ping_timeout):
- await pong_waiter
- self.logger.debug("% received keepalive pong")
- except asyncio.TimeoutError:
- if self.debug:
- self.logger.debug("! timed out waiting for keepalive pong")
- self.fail_connection(
- CloseCode.INTERNAL_ERROR,
- "keepalive ping timeout",
- )
- break
-
- except ConnectionClosed:
- pass
-
- except Exception:
- self.logger.error("keepalive ping failed", exc_info=True)
-
- async def close_connection(self) -> None:
- """
- 7.1.1. Close the WebSocket Connection
-
- When the opening handshake succeeds, :meth:`connection_open` starts
- this coroutine in a task. It waits for the data transfer phase to
- complete then it closes the TCP connection cleanly.
-
- When the opening handshake fails, :meth:`fail_connection` does the
- same. There's no data transfer phase in that case.
-
- """
- try:
- # Wait for the data transfer phase to complete.
- if hasattr(self, "transfer_data_task"):
- try:
- await self.transfer_data_task
- except asyncio.CancelledError:
- pass
-
- # Cancel the keepalive ping task.
- if hasattr(self, "keepalive_ping_task"):
- self.keepalive_ping_task.cancel()
-
- # A client should wait for a TCP close from the server.
- if self.is_client and hasattr(self, "transfer_data_task"):
- if await self.wait_for_connection_lost():
- return
- if self.debug:
- self.logger.debug("! timed out waiting for TCP close")
-
- # Half-close the TCP connection if possible (when there's no TLS).
- if self.transport.can_write_eof():
- if self.debug:
- self.logger.debug("x half-closing TCP connection")
- # write_eof() doesn't document which exceptions it raises.
- # "[Errno 107] Transport endpoint is not connected" happens
- # but it isn't completely clear under which circumstances.
- # uvloop can raise RuntimeError here.
- try:
- self.transport.write_eof()
- except (OSError, RuntimeError): # pragma: no cover
- pass
-
- if await self.wait_for_connection_lost():
- return
- if self.debug:
- self.logger.debug("! timed out waiting for TCP close")
-
- finally:
- # The try/finally ensures that the transport never remains open,
- # even if this coroutine is canceled (for example).
- await self.close_transport()
-
- async def close_transport(self) -> None:
- """
- Close the TCP connection.
-
- """
- # If connection_lost() was called, the TCP connection is closed.
- # However, if TLS is enabled, the transport still needs closing.
- # Else asyncio complains: ResourceWarning: unclosed transport.
- if self.connection_lost_waiter.done() and self.transport.is_closing():
- return
-
- # Close the TCP connection. Buffers are flushed asynchronously.
- if self.debug:
- self.logger.debug("x closing TCP connection")
- self.transport.close()
-
- if await self.wait_for_connection_lost():
- return
- if self.debug:
- self.logger.debug("! timed out waiting for TCP close")
-
- # Abort the TCP connection. Buffers are discarded.
- if self.debug:
- self.logger.debug("x aborting TCP connection")
- # Due to a bug in coverage, this is erroneously reported as not covered.
- self.transport.abort() # pragma: no cover
-
- # connection_lost() is called quickly after aborting.
- await self.wait_for_connection_lost()
-
- async def wait_for_connection_lost(self) -> bool:
- """
- Wait until the TCP connection is closed or ``self.close_timeout`` elapses.
-
- Return :obj:`True` if the connection is closed and :obj:`False`
- otherwise.
-
- """
- if not self.connection_lost_waiter.done():
- try:
- async with asyncio_timeout(self.close_timeout):
- await asyncio.shield(self.connection_lost_waiter)
- except asyncio.TimeoutError:
- pass
- # Re-check self.connection_lost_waiter.done() synchronously because
- # connection_lost() could run between the moment the timeout occurs
- # and the moment this coroutine resumes running.
- return self.connection_lost_waiter.done()
-
- def fail_connection(
- self,
- code: int = CloseCode.ABNORMAL_CLOSURE,
- reason: str = "",
- ) -> None:
- """
- 7.1.7. Fail the WebSocket Connection
-
- This requires:
-
- 1. Stopping all processing of incoming data, which means cancelling
- :attr:`transfer_data_task`. The close code will be 1006 unless a
- close frame was received earlier.
-
- 2. Sending a close frame with an appropriate code if the opening
- handshake succeeded and the other side is likely to process it.
-
- 3. Closing the connection. :meth:`close_connection` takes care of
- this once :attr:`transfer_data_task` exits after being canceled.
-
- (The specification describes these steps in the opposite order.)
-
- """
- if self.debug:
- self.logger.debug("! failing connection with code %d", code)
-
- # Cancel transfer_data_task if the opening handshake succeeded.
- # cancel() is idempotent and ignored if the task is done already.
- if hasattr(self, "transfer_data_task"):
- self.transfer_data_task.cancel()
-
- # Send a close frame when the state is OPEN (a close frame was already
- # sent if it's CLOSING), except when failing the connection because of
- # an error reading from or writing to the network.
- # Don't send a close frame if the connection is broken.
- if code != CloseCode.ABNORMAL_CLOSURE and self.state is State.OPEN:
- close = Close(code, reason)
-
- # Write the close frame without draining the write buffer.
-
- # Keeping fail_connection() synchronous guarantees it can't
- # get stuck and simplifies the implementation of the callers.
- # Not drainig the write buffer is acceptable in this context.
-
- # This duplicates a few lines of code from write_close_frame().
-
- self.state = State.CLOSING
- if self.debug:
- self.logger.debug("= connection is CLOSING")
-
- # If self.close_rcvd was set, the connection state would be
- # CLOSING. Therefore self.close_rcvd isn't set and we don't
- # have to set self.close_rcvd_then_sent.
- assert self.close_rcvd is None
- self.close_sent = close
-
- self.write_frame_sync(True, OP_CLOSE, close.serialize())
-
- # Start close_connection_task if the opening handshake didn't succeed.
- if not hasattr(self, "close_connection_task"):
- self.close_connection_task = self.loop.create_task(self.close_connection())
-
- def abort_pings(self) -> None:
- """
- Raise ConnectionClosed in pending keepalive pings.
-
- They'll never receive a pong once the connection is closed.
-
- """
- assert self.state is State.CLOSED
- exc = self.connection_closed_exc()
-
- for pong_waiter, _ping_timestamp in self.pings.values():
- pong_waiter.set_exception(exc)
- # If the exception is never retrieved, it will be logged when ping
- # is garbage-collected. This is confusing for users.
- # Given that ping is done (with an exception), canceling it does
- # nothing, but it prevents logging the exception.
- pong_waiter.cancel()
-
- # asyncio.Protocol methods
-
- def connection_made(self, transport: asyncio.BaseTransport) -> None:
- """
- Configure write buffer limits.
-
- The high-water limit is defined by ``self.write_limit``.
-
- The low-water limit currently defaults to ``self.write_limit // 4`` in
- :meth:`~asyncio.WriteTransport.set_write_buffer_limits`, which should
- be all right for reasonable use cases of this library.
-
- This is the earliest point where we can get hold of the transport,
- which means it's the best point for configuring it.
-
- """
- transport = cast(asyncio.Transport, transport)
- transport.set_write_buffer_limits(self.write_limit)
- self.transport = transport
-
- # Copied from asyncio.StreamReaderProtocol
- self.reader.set_transport(transport)
-
- def connection_lost(self, exc: Optional[Exception]) -> None:
- """
- 7.1.4. The WebSocket Connection is Closed.
-
- """
- self.state = State.CLOSED
- self.logger.debug("= connection is CLOSED")
-
- self.abort_pings()
-
- # If self.connection_lost_waiter isn't pending, that's a bug, because:
- # - it's set only here in connection_lost() which is called only once;
- # - it must never be canceled.
- self.connection_lost_waiter.set_result(None)
-
- if True: # pragma: no cover
- # Copied from asyncio.StreamReaderProtocol
- if self.reader is not None:
- if exc is None:
- self.reader.feed_eof()
- else:
- self.reader.set_exception(exc)
-
- # Copied from asyncio.FlowControlMixin
- # Wake up the writer if currently paused.
- if not self._paused:
- return
- waiter = self._drain_waiter
- if waiter is None:
- return
- self._drain_waiter = None
- if waiter.done():
- return
- if exc is None:
- waiter.set_result(None)
- else:
- waiter.set_exception(exc)
-
- def pause_writing(self) -> None: # pragma: no cover
- assert not self._paused
- self._paused = True
-
- def resume_writing(self) -> None: # pragma: no cover
- assert self._paused
- self._paused = False
-
- waiter = self._drain_waiter
- if waiter is not None:
- self._drain_waiter = None
- if not waiter.done():
- waiter.set_result(None)
-
- def data_received(self, data: bytes) -> None:
- self.reader.feed_data(data)
-
- def eof_received(self) -> None:
- """
- Close the transport after receiving EOF.
-
- The WebSocket protocol has its own closing handshake: endpoints close
- the TCP or TLS connection after sending and receiving a close frame.
-
- As a consequence, they never need to write after receiving EOF, so
- there's no reason to keep the transport open by returning :obj:`True`.
-
- Besides, that doesn't work on TLS connections.
-
- """
- self.reader.feed_eof()
-
-
-def broadcast(
- websockets: Iterable[WebSocketCommonProtocol],
- message: Data,
- raise_exceptions: bool = False,
-) -> None:
- """
- Broadcast a message to several WebSocket connections.
-
- A string (:class:`str`) is sent as a Text_ frame. A bytestring or bytes-like
- object (:class:`bytes`, :class:`bytearray`, or :class:`memoryview`) is sent
- as a Binary_ frame.
-
- .. _Text: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
- .. _Binary: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
-
- :func:`broadcast` pushes the message synchronously to all connections even
- if their write buffers are overflowing. There's no backpressure.
-
- If you broadcast messages faster than a connection can handle them, messages
- will pile up in its write buffer until the connection times out. Keep
- ``ping_interval`` and ``ping_timeout`` low to prevent excessive memory usage
- from slow connections.
-
- Unlike :meth:`~websockets.server.WebSocketServerProtocol.send`,
- :func:`broadcast` doesn't support sending fragmented messages. Indeed,
- fragmentation is useful for sending large messages without buffering them in
- memory, while :func:`broadcast` buffers one copy per connection as fast as
- possible.
-
- :func:`broadcast` skips connections that aren't open in order to avoid
- errors on connections where the closing handshake is in progress.
-
- :func:`broadcast` ignores failures to write the message on some connections.
- It continues writing to other connections. On Python 3.11 and above, you
- may set ``raise_exceptions`` to :obj:`True` to record failures and raise all
- exceptions in a :pep:`654` :exc:`ExceptionGroup`.
-
- Args:
- websockets: WebSocket connections to which the message will be sent.
- message: Message to send.
- raise_exceptions: Whether to raise an exception in case of failures.
-
- Raises:
- TypeError: If ``message`` doesn't have a supported type.
-
- """
- if not isinstance(message, (str, bytes, bytearray, memoryview)):
- raise TypeError("data must be str or bytes-like")
-
- if raise_exceptions:
- if sys.version_info[:2] < (3, 11): # pragma: no cover
- raise ValueError("raise_exceptions requires at least Python 3.11")
- exceptions = []
-
- opcode, data = prepare_data(message)
-
- for websocket in websockets:
- if websocket.state is not State.OPEN:
- continue
-
- if websocket._fragmented_message_waiter is not None:
- if raise_exceptions:
- exception = RuntimeError("sending a fragmented message")
- exceptions.append(exception)
- else:
- websocket.logger.warning(
- "skipped broadcast: sending a fragmented message",
- )
-
- try:
- websocket.write_frame_sync(True, opcode, data)
- except Exception as write_exception:
- if raise_exceptions:
- exception = RuntimeError("failed to write message")
- exception.__cause__ = write_exception
- exceptions.append(exception)
- else:
- websocket.logger.warning(
- "skipped broadcast: failed to write message",
- exc_info=True,
- )
-
- if raise_exceptions:
- raise ExceptionGroup("skipped broadcast", exceptions)
diff --git a/venv/lib/python3.11/site-packages/websockets/legacy/server.py b/venv/lib/python3.11/site-packages/websockets/legacy/server.py
deleted file mode 100644
index 7c24dd7..0000000
--- a/venv/lib/python3.11/site-packages/websockets/legacy/server.py
+++ /dev/null
@@ -1,1185 +0,0 @@
-from __future__ import annotations
-
-import asyncio
-import email.utils
-import functools
-import http
-import inspect
-import logging
-import socket
-import warnings
-from types import TracebackType
-from typing import (
- Any,
- Awaitable,
- Callable,
- Generator,
- Iterable,
- List,
- Optional,
- Sequence,
- Set,
- Tuple,
- Type,
- Union,
- cast,
-)
-
-from ..datastructures import Headers, HeadersLike, MultipleValuesError
-from ..exceptions import (
- AbortHandshake,
- InvalidHandshake,
- InvalidHeader,
- InvalidMessage,
- InvalidOrigin,
- InvalidUpgrade,
- NegotiationError,
-)
-from ..extensions import Extension, ServerExtensionFactory
-from ..extensions.permessage_deflate import enable_server_permessage_deflate
-from ..headers import (
- build_extension,
- parse_extension,
- parse_subprotocol,
- validate_subprotocols,
-)
-from ..http import USER_AGENT
-from ..protocol import State
-from ..typing import ExtensionHeader, LoggerLike, Origin, StatusLike, Subprotocol
-from .compatibility import asyncio_timeout
-from .handshake import build_response, check_request
-from .http import read_request
-from .protocol import WebSocketCommonProtocol
-
-
-__all__ = ["serve", "unix_serve", "WebSocketServerProtocol", "WebSocketServer"]
-
-
-HeadersLikeOrCallable = Union[HeadersLike, Callable[[str, Headers], HeadersLike]]
-
-HTTPResponse = Tuple[StatusLike, HeadersLike, bytes]
-
-
class WebSocketServerProtocol(WebSocketCommonProtocol):
    """
    WebSocket server connection.

    :class:`WebSocketServerProtocol` provides :meth:`recv` and :meth:`send`
    coroutines for receiving and sending messages.

    It supports asynchronous iteration to receive messages::

        async for message in websocket:
            await process(message)

    The iterator exits normally when the connection is closed with close code
    1000 (OK) or 1001 (going away) or without a close code. It raises
    a :exc:`~websockets.exceptions.ConnectionClosedError` when the connection
    is closed with any other code.

    You may customize the opening handshake in a subclass by
    overriding :meth:`process_request` or :meth:`select_subprotocol`.

    Args:
        ws_server: WebSocket server that created this connection.

    See :func:`serve` for the documentation of ``ws_handler``, ``logger``, ``origins``,
    ``extensions``, ``subprotocols``, ``extra_headers``, and ``server_header``.

    See :class:`~websockets.legacy.protocol.WebSocketCommonProtocol` for the
    documentation of ``ping_interval``, ``ping_timeout``, ``close_timeout``,
    ``max_size``, ``max_queue``, ``read_limit``, and ``write_limit``.

    """

    # Role flags consumed by WebSocketCommonProtocol (e.g. for masking rules
    # and log prefixes): a server never masks outgoing frames.
    is_client = False
    side = "server"

    def __init__(
        self,
        ws_handler: Union[
            Callable[[WebSocketServerProtocol], Awaitable[Any]],
            Callable[[WebSocketServerProtocol, str], Awaitable[Any]],  # deprecated
        ],
        ws_server: WebSocketServer,
        *,
        logger: Optional[LoggerLike] = None,
        origins: Optional[Sequence[Optional[Origin]]] = None,
        extensions: Optional[Sequence[ServerExtensionFactory]] = None,
        subprotocols: Optional[Sequence[Subprotocol]] = None,
        extra_headers: Optional[HeadersLikeOrCallable] = None,
        server_header: Optional[str] = USER_AGENT,
        process_request: Optional[
            Callable[[str, Headers], Awaitable[Optional[HTTPResponse]]]
        ] = None,
        select_subprotocol: Optional[
            Callable[[Sequence[Subprotocol], Sequence[Subprotocol]], Subprotocol]
        ] = None,
        open_timeout: Optional[float] = 10,
        **kwargs: Any,
    ) -> None:
        if logger is None:
            logger = logging.getLogger("websockets.server")
        super().__init__(logger=logger, **kwargs)
        # For backwards compatibility with 6.0 or earlier.
        # An empty string used to mean "no origin"; translate it to None.
        if origins is not None and "" in origins:
            warnings.warn("use None instead of '' in origins", DeprecationWarning)
            origins = [None if origin == "" else origin for origin in origins]
        # For backwards compatibility with 10.0 or earlier. Done here in
        # addition to serve to trigger the deprecation warning on direct
        # use of WebSocketServerProtocol.
        self.ws_handler = remove_path_argument(ws_handler)
        self.ws_server = ws_server
        self.origins = origins
        self.available_extensions = extensions
        self.available_subprotocols = subprotocols
        self.extra_headers = extra_headers
        self.server_header = server_header
        self._process_request = process_request
        self._select_subprotocol = select_subprotocol
        self.open_timeout = open_timeout

    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        """
        Register connection and initialize a task to handle it.

        """
        super().connection_made(transport)
        # Register the connection with the server before creating the handler
        # task. Registering at the beginning of the handler coroutine would
        # create a race condition between the creation of the task, which
        # schedules its execution, and the moment the handler starts running.
        self.ws_server.register(self)
        self.handler_task = self.loop.create_task(self.handler())

    async def handler(self) -> None:
        """
        Handle the lifecycle of a WebSocket connection.

        Since this method doesn't have a caller able to handle exceptions, it
        attempts to log relevant ones and guarantees that the TCP connection is
        closed before exiting.

        """
        try:
            try:
                # Bound the whole opening handshake with open_timeout.
                async with asyncio_timeout(self.open_timeout):
                    await self.handshake(
                        origins=self.origins,
                        available_extensions=self.available_extensions,
                        available_subprotocols=self.available_subprotocols,
                        extra_headers=self.extra_headers,
                    )
            # Timeouts and connection drops propagate to the outer try, which
            # closes the transport; every other handshake failure is mapped
            # to an HTTP error response below.
            except asyncio.TimeoutError:  # pragma: no cover
                raise
            except ConnectionError:
                raise
            except Exception as exc:
                if isinstance(exc, AbortHandshake):
                    status, headers, body = exc.status, exc.headers, exc.body
                elif isinstance(exc, InvalidOrigin):
                    if self.debug:
                        self.logger.debug("! invalid origin", exc_info=True)
                    status, headers, body = (
                        http.HTTPStatus.FORBIDDEN,
                        Headers(),
                        f"Failed to open a WebSocket connection: {exc}.\n".encode(),
                    )
                elif isinstance(exc, InvalidUpgrade):
                    if self.debug:
                        self.logger.debug("! invalid upgrade", exc_info=True)
                    status, headers, body = (
                        http.HTTPStatus.UPGRADE_REQUIRED,
                        Headers([("Upgrade", "websocket")]),
                        (
                            f"Failed to open a WebSocket connection: {exc}.\n"
                            f"\n"
                            f"You cannot access a WebSocket server directly "
                            f"with a browser. You need a WebSocket client.\n"
                        ).encode(),
                    )
                elif isinstance(exc, InvalidHandshake):
                    if self.debug:
                        self.logger.debug("! invalid handshake", exc_info=True)
                    status, headers, body = (
                        http.HTTPStatus.BAD_REQUEST,
                        Headers(),
                        f"Failed to open a WebSocket connection: {exc}.\n".encode(),
                    )
                else:
                    self.logger.error("opening handshake failed", exc_info=True)
                    status, headers, body = (
                        http.HTTPStatus.INTERNAL_SERVER_ERROR,
                        Headers(),
                        (
                            b"Failed to open a WebSocket connection.\n"
                            b"See server log for more information.\n"
                        ),
                    )

                # setdefault() keeps any headers AbortHandshake already set.
                headers.setdefault("Date", email.utils.formatdate(usegmt=True))
                if self.server_header is not None:
                    headers.setdefault("Server", self.server_header)

                headers.setdefault("Content-Length", str(len(body)))
                headers.setdefault("Content-Type", "text/plain")
                headers.setdefault("Connection", "close")

                self.write_http_response(status, headers, body)
                self.logger.info(
                    "connection rejected (%d %s)", status.value, status.phrase
                )
                await self.close_transport()
                return

            try:
                await self.ws_handler(self)
            except Exception:
                self.logger.error("connection handler failed", exc_info=True)
                # 1011 = internal error; skip if the connection already closed.
                if not self.closed:
                    self.fail_connection(1011)
                raise

            try:
                await self.close()
            except ConnectionError:
                raise
            except Exception:
                self.logger.error("closing handshake failed", exc_info=True)
                raise

        except Exception:
            # Last-ditch attempt to avoid leaking connections on errors.
            try:
                self.transport.close()
            except Exception:  # pragma: no cover
                pass

        finally:
            # Unregister the connection with the server when the handler task
            # terminates. Registration is tied to the lifecycle of the handler
            # task because the server waits for tasks attached to registered
            # connections before terminating.
            self.ws_server.unregister(self)
            self.logger.info("connection closed")

    async def read_http_request(self) -> Tuple[str, Headers]:
        """
        Read request line and headers from the HTTP request.

        If the request contains a body, it may be read from ``self.reader``
        after this coroutine returns.

        Returns:
            Tuple[str, Headers]: path and headers of the request; also stored
            on ``self.path`` and ``self.request_headers``.

        Raises:
            InvalidMessage: if the HTTP message is malformed or isn't an
                HTTP/1.1 GET request.

        """
        try:
            path, headers = await read_request(self.reader)
        except asyncio.CancelledError:  # pragma: no cover
            raise
        except Exception as exc:
            raise InvalidMessage("did not receive a valid HTTP request") from exc

        if self.debug:
            self.logger.debug("< GET %s HTTP/1.1", path)
            for key, value in headers.raw_items():
                self.logger.debug("< %s: %s", key, value)

        self.path = path
        self.request_headers = headers

        return path, headers

    def write_http_response(
        self, status: http.HTTPStatus, headers: Headers, body: Optional[bytes] = None
    ) -> None:
        """
        Write status line and headers to the HTTP response.

        This method is also able to write a response body.

        """
        self.response_headers = headers

        if self.debug:
            self.logger.debug("> HTTP/1.1 %d %s", status.value, status.phrase)
            for key, value in headers.raw_items():
                self.logger.debug("> %s: %s", key, value)
            if body is not None:
                self.logger.debug("> [body] (%d bytes)", len(body))

        # Since the status line and headers only contain ASCII characters,
        # we can keep this simple.
        response = f"HTTP/1.1 {status.value} {status.phrase}\r\n"
        response += str(headers)

        self.transport.write(response.encode())

        if body is not None:
            self.transport.write(body)

    async def process_request(
        self, path: str, request_headers: Headers
    ) -> Optional[HTTPResponse]:
        """
        Intercept the HTTP request and return an HTTP response if appropriate.

        You may override this method in a :class:`WebSocketServerProtocol`
        subclass, for example:

        * to return an HTTP 200 OK response on a given path; then a load
          balancer can use this path for a health check;
        * to authenticate the request and return an HTTP 401 Unauthorized or an
          HTTP 403 Forbidden when authentication fails.

        You may also override this method with the ``process_request``
        argument of :func:`serve` and :class:`WebSocketServerProtocol`. This
        is equivalent, except ``process_request`` won't have access to the
        protocol instance, so it can't store information for later use.

        :meth:`process_request` is expected to complete quickly. If it may run
        for a long time, then it should await :meth:`wait_closed` and exit if
        :meth:`wait_closed` completes, or else it could prevent the server
        from shutting down.

        Args:
            path: request path, including optional query string.
            request_headers: request headers.

        Returns:
            Optional[Tuple[StatusLike, HeadersLike, bytes]]: :obj:`None`
            to continue the WebSocket handshake normally.

            An HTTP response, represented by a 3-uple of the response status,
            headers, and body, to abort the WebSocket handshake and return
            that HTTP response instead.

        """
        if self._process_request is not None:
            response = self._process_request(path, request_headers)
            if isinstance(response, Awaitable):
                return await response
            else:
                # For backwards compatibility with 7.0, when process_request
                # was allowed to be a plain function returning the response.
                warnings.warn(
                    "declare process_request as a coroutine", DeprecationWarning
                )
                return response
        return None

    @staticmethod
    def process_origin(
        headers: Headers, origins: Optional[Sequence[Optional[Origin]]] = None
    ) -> Optional[Origin]:
        """
        Handle the Origin HTTP request header.

        Args:
            headers: request headers.
            origins: optional list of acceptable origins; include :obj:`None`
                in the list if a missing Origin header is acceptable.

        Returns:
            Optional[Origin]: origin of the request, if any.

        Raises:
            InvalidHeader: if more than one Origin header is present.
            InvalidOrigin: if the origin isn't acceptable.

        """
        # "The user agent MUST NOT include more than one Origin header field"
        # per https://www.rfc-editor.org/rfc/rfc6454.html#section-7.3.
        try:
            origin = cast(Optional[Origin], headers.get("Origin"))
        except MultipleValuesError as exc:
            raise InvalidHeader("Origin", "more than one Origin header found") from exc
        if origins is not None:
            if origin not in origins:
                raise InvalidOrigin(origin)
        return origin

    @staticmethod
    def process_extensions(
        headers: Headers,
        available_extensions: Optional[Sequence[ServerExtensionFactory]],
    ) -> Tuple[Optional[str], List[Extension]]:
        """
        Handle the Sec-WebSocket-Extensions HTTP request header.

        Accept or reject each extension proposed in the client request.
        Negotiate parameters for accepted extensions.

        Return the Sec-WebSocket-Extensions HTTP response header and the list
        of accepted extensions.

        :rfc:`6455` leaves the rules up to the specification of each
        extension.

        To provide this level of flexibility, for each extension proposed by
        the client, we check for a match with each extension available in the
        server configuration. If no match is found, the extension is ignored.

        If several variants of the same extension are proposed by the client,
        it may be accepted several times, which won't make sense in general.
        Extensions must implement their own requirements. For this purpose,
        the list of previously accepted extensions is provided.

        This process doesn't allow the server to reorder extensions. It can
        only select a subset of the extensions proposed by the client.

        Other requirements, for example related to mandatory extensions or the
        order of extensions, may be implemented by overriding this method.

        Args:
            headers: request headers.
            available_extensions: optional list of supported extensions.

        Raises:
            InvalidHandshake: to abort the handshake with an HTTP 400 error.

        """
        response_header_value: Optional[str] = None

        extension_headers: List[ExtensionHeader] = []
        accepted_extensions: List[Extension] = []

        header_values = headers.get_all("Sec-WebSocket-Extensions")

        if header_values and available_extensions:
            # The client may spread proposals over several header lines;
            # flatten them into a single list of (name, params) pairs.
            parsed_header_values: List[ExtensionHeader] = sum(
                [parse_extension(header_value) for header_value in header_values], []
            )

            for name, request_params in parsed_header_values:
                for ext_factory in available_extensions:
                    # Skip non-matching extensions based on their name.
                    if ext_factory.name != name:
                        continue

                    # Skip non-matching extensions based on their params.
                    try:
                        response_params, extension = ext_factory.process_request_params(
                            request_params, accepted_extensions
                        )
                    except NegotiationError:
                        continue

                    # Add matching extension to the final list.
                    extension_headers.append((name, response_params))
                    accepted_extensions.append(extension)

                    # Break out of the loop once we have a match.
                    break

                # If we didn't break from the loop, no extension in our list
                # matched what the client sent. The extension is declined.

        # Serialize extension header.
        if extension_headers:
            response_header_value = build_extension(extension_headers)

        return response_header_value, accepted_extensions

    # Not @staticmethod because it calls self.select_subprotocol()
    def process_subprotocol(
        self, headers: Headers, available_subprotocols: Optional[Sequence[Subprotocol]]
    ) -> Optional[Subprotocol]:
        """
        Handle the Sec-WebSocket-Protocol HTTP request header.

        Return Sec-WebSocket-Protocol HTTP response header, which is the same
        as the selected subprotocol.

        Args:
            headers: request headers.
            available_subprotocols: optional list of supported subprotocols.

        Raises:
            InvalidHandshake: to abort the handshake with an HTTP 400 error.

        """
        subprotocol: Optional[Subprotocol] = None

        header_values = headers.get_all("Sec-WebSocket-Protocol")

        if header_values and available_subprotocols:
            # Subprotocols may be spread over several header lines; flatten
            # them before delegating the choice to select_subprotocol().
            parsed_header_values: List[Subprotocol] = sum(
                [parse_subprotocol(header_value) for header_value in header_values], []
            )

            subprotocol = self.select_subprotocol(
                parsed_header_values, available_subprotocols
            )

        return subprotocol

    def select_subprotocol(
        self,
        client_subprotocols: Sequence[Subprotocol],
        server_subprotocols: Sequence[Subprotocol],
    ) -> Optional[Subprotocol]:
        """
        Pick a subprotocol among those supported by the client and the server.

        If several subprotocols are available, select the preferred subprotocol
        by giving equal weight to the preferences of the client and the server.

        If no subprotocol is available, proceed without a subprotocol.

        You may provide a ``select_subprotocol`` argument to :func:`serve` or
        :class:`WebSocketServerProtocol` to override this logic. For example,
        you could reject the handshake if the client doesn't support a
        particular subprotocol, rather than accept the handshake without that
        subprotocol.

        Args:
            client_subprotocols: list of subprotocols offered by the client.
            server_subprotocols: list of subprotocols available on the server.

        Returns:
            Optional[Subprotocol]: Selected subprotocol, if a common subprotocol
            was found.

            :obj:`None` to continue without a subprotocol.

        """
        if self._select_subprotocol is not None:
            return self._select_subprotocol(client_subprotocols, server_subprotocols)

        subprotocols = set(client_subprotocols) & set(server_subprotocols)
        if not subprotocols:
            return None
        # Rank common subprotocols by the sum of their positions in both
        # preference lists; the smallest sum wins.
        return sorted(
            subprotocols,
            key=lambda p: client_subprotocols.index(p) + server_subprotocols.index(p),
        )[0]

    async def handshake(
        self,
        origins: Optional[Sequence[Optional[Origin]]] = None,
        available_extensions: Optional[Sequence[ServerExtensionFactory]] = None,
        available_subprotocols: Optional[Sequence[Subprotocol]] = None,
        extra_headers: Optional[HeadersLikeOrCallable] = None,
    ) -> str:
        """
        Perform the server side of the opening handshake.

        Args:
            origins: list of acceptable values of the Origin HTTP header;
                include :obj:`None` if the lack of an origin is acceptable.
            available_extensions: list of supported extensions, in order in
                which they should be tried.
            available_subprotocols: list of supported subprotocols, in order
                of decreasing preference.
            extra_headers: arbitrary HTTP headers to add to the response when
                the handshake succeeds.

        Returns:
            str: path of the URI of the request.

        Raises:
            InvalidHandshake: if the handshake fails.

        """
        path, request_headers = await self.read_http_request()

        # Hook for customizing request handling, for example checking
        # authentication or treating some paths as plain HTTP endpoints.
        early_response_awaitable = self.process_request(path, request_headers)
        if isinstance(early_response_awaitable, Awaitable):
            early_response = await early_response_awaitable
        else:
            # For backwards compatibility with 7.0.
            warnings.warn("declare process_request as a coroutine", DeprecationWarning)
            early_response = early_response_awaitable

        # The connection may drop while process_request is running.
        if self.state is State.CLOSED:
            # This subclass of ConnectionError is silently ignored in handler().
            raise BrokenPipeError("connection closed during opening handshake")

        # Change the response to a 503 error if the server is shutting down.
        if not self.ws_server.is_serving():
            early_response = (
                http.HTTPStatus.SERVICE_UNAVAILABLE,
                [],
                b"Server is shutting down.\n",
            )

        # AbortHandshake is caught in handler(), which turns it into the
        # corresponding HTTP error response.
        if early_response is not None:
            raise AbortHandshake(*early_response)

        key = check_request(request_headers)

        self.origin = self.process_origin(request_headers, origins)

        extensions_header, self.extensions = self.process_extensions(
            request_headers, available_extensions
        )

        protocol_header = self.subprotocol = self.process_subprotocol(
            request_headers, available_subprotocols
        )

        response_headers = Headers()

        build_response(response_headers, key)

        if extensions_header is not None:
            response_headers["Sec-WebSocket-Extensions"] = extensions_header

        if protocol_header is not None:
            response_headers["Sec-WebSocket-Protocol"] = protocol_header

        if callable(extra_headers):
            extra_headers = extra_headers(path, self.request_headers)
        if extra_headers is not None:
            response_headers.update(extra_headers)

        response_headers.setdefault("Date", email.utils.formatdate(usegmt=True))
        if self.server_header is not None:
            response_headers.setdefault("Server", self.server_header)

        self.write_http_response(http.HTTPStatus.SWITCHING_PROTOCOLS, response_headers)

        self.logger.info("connection open")

        self.connection_open()

        return path
-
-
class WebSocketServer:
    """
    WebSocket server returned by :func:`serve`.

    This class provides the same interface as :class:`~asyncio.Server`,
    notably the :meth:`~asyncio.Server.close`
    and :meth:`~asyncio.Server.wait_closed` methods.

    It keeps track of WebSocket connections in order to close them properly
    when shutting down.

    Args:
        logger: Logger for this server.
            It defaults to ``logging.getLogger("websockets.server")``.
            See the :doc:`logging guide <../../topics/logging>` for details.

    """

    def __init__(self, logger: Optional[LoggerLike] = None):
        if logger is None:
            logger = logging.getLogger("websockets.server")
        self.logger = logger

        # Keep track of active connections.
        self.websockets: Set[WebSocketServerProtocol] = set()

        # Task responsible for closing the server and terminating connections.
        self.close_task: Optional[asyncio.Task[None]] = None

        # Completed when the server is closed and connections are terminated.
        # Created lazily in wrap() because it needs the event loop.
        self.closed_waiter: asyncio.Future[None]

    def wrap(self, server: asyncio.base_events.Server) -> None:
        """
        Attach to a given :class:`~asyncio.Server`.

        Since :meth:`~asyncio.loop.create_server` doesn't support injecting a
        custom ``Server`` class, the easiest solution that doesn't rely on
        private :mod:`asyncio` APIs is to:

        - instantiate a :class:`WebSocketServer`
        - give the protocol factory a reference to that instance
        - call :meth:`~asyncio.loop.create_server` with the factory
        - attach the resulting :class:`~asyncio.Server` with this method

        """
        self.server = server
        # Log one line per listening socket, formatted per address family.
        for sock in server.sockets:
            if sock.family == socket.AF_INET:
                name = "%s:%d" % sock.getsockname()
            elif sock.family == socket.AF_INET6:
                name = "[%s]:%d" % sock.getsockname()[:2]
            elif sock.family == socket.AF_UNIX:
                name = sock.getsockname()
            # In the unlikely event that someone runs websockets over a
            # protocol other than IP or Unix sockets, avoid crashing.
            else:  # pragma: no cover
                name = str(sock.getsockname())
            self.logger.info("server listening on %s", name)

        # Initialized here because we need a reference to the event loop.
        # This should be moved back to __init__ when dropping Python < 3.10.
        self.closed_waiter = server.get_loop().create_future()

    def register(self, protocol: WebSocketServerProtocol) -> None:
        """
        Register a connection with this server.

        """
        self.websockets.add(protocol)

    def unregister(self, protocol: WebSocketServerProtocol) -> None:
        """
        Unregister a connection with this server.

        """
        self.websockets.remove(protocol)

    def close(self, close_connections: bool = True) -> None:
        """
        Close the server.

        * Close the underlying :class:`~asyncio.Server`.
        * When ``close_connections`` is :obj:`True`, which is the default,
          close existing connections. Specifically:

          * Reject opening WebSocket connections with an HTTP 503 (service
            unavailable) error. This happens when the server accepted the TCP
            connection but didn't complete the opening handshake before closing.
          * Close open WebSocket connections with close code 1001 (going away).

        * Wait until all connection handlers terminate.

        :meth:`close` is idempotent.

        """
        # Idempotence: only the first call creates the close task; later
        # calls observe it and do nothing.
        if self.close_task is None:
            self.close_task = self.get_loop().create_task(
                self._close(close_connections)
            )

    async def _close(self, close_connections: bool) -> None:
        """
        Implementation of :meth:`close`.

        This calls :meth:`~asyncio.Server.close` on the underlying
        :class:`~asyncio.Server` object to stop accepting new connections and
        then closes open connections with close code 1001.

        """
        self.logger.info("server closing")

        # Stop accepting new connections.
        self.server.close()

        # Wait until all accepted connections reach connection_made() and call
        # register(). See https://bugs.python.org/issue34852 for details.
        await asyncio.sleep(0)

        if close_connections:
            # Close OPEN connections with close code 1001. After server.close(),
            # handshake() closes OPENING connections with an HTTP 503 error.
            close_tasks = [
                asyncio.create_task(websocket.close(1001))
                for websocket in self.websockets
                if websocket.state is not State.CONNECTING
            ]
            # asyncio.wait doesn't accept an empty first argument.
            if close_tasks:
                await asyncio.wait(close_tasks)

        # Wait until all TCP connections are closed.
        await self.server.wait_closed()

        # Wait until all connection handlers terminate.
        # asyncio.wait doesn't accept an empty first argument.
        if self.websockets:
            await asyncio.wait(
                [websocket.handler_task for websocket in self.websockets]
            )

        # Tell wait_closed() to return.
        self.closed_waiter.set_result(None)

        self.logger.info("server closed")

    async def wait_closed(self) -> None:
        """
        Wait until the server is closed.

        When :meth:`wait_closed` returns, all TCP connections are closed and
        all connection handlers have returned.

        To ensure a fast shutdown, a connection handler should always be
        awaiting at least one of:

        * :meth:`~WebSocketServerProtocol.recv`: when the connection is closed,
          it raises :exc:`~websockets.exceptions.ConnectionClosedOK`;
        * :meth:`~WebSocketServerProtocol.wait_closed`: when the connection is
          closed, it returns.

        Then the connection handler is immediately notified of the shutdown;
        it can clean up and exit.

        """
        # shield() keeps closed_waiter reusable: canceling one waiter must
        # not cancel the shared future other callers are awaiting.
        await asyncio.shield(self.closed_waiter)

    def get_loop(self) -> asyncio.AbstractEventLoop:
        """
        See :meth:`asyncio.Server.get_loop`.

        """
        return self.server.get_loop()

    def is_serving(self) -> bool:
        """
        See :meth:`asyncio.Server.is_serving`.

        """
        return self.server.is_serving()

    async def start_serving(self) -> None:  # pragma: no cover
        """
        See :meth:`asyncio.Server.start_serving`.

        Typical use::

            server = await serve(..., start_serving=False)
            # perform additional setup here...
            # ... then start the server
            await server.start_serving()

        """
        await self.server.start_serving()

    async def serve_forever(self) -> None:  # pragma: no cover
        """
        See :meth:`asyncio.Server.serve_forever`.

        Typical use::

            server = await serve(...)
            # this coroutine doesn't return
            # canceling it stops the server
            await server.serve_forever()

        This is an alternative to using :func:`serve` as an asynchronous context
        manager. Shutdown is triggered by canceling :meth:`serve_forever`
        instead of exiting a :func:`serve` context.

        """
        await self.server.serve_forever()

    @property
    def sockets(self) -> Iterable[socket.socket]:
        """
        See :attr:`asyncio.Server.sockets`.

        """
        return self.server.sockets

    async def __aenter__(self) -> WebSocketServer:  # pragma: no cover
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_value: Optional[BaseException],
        traceback: Optional[TracebackType],
    ) -> None:  # pragma: no cover
        self.close()
        await self.wait_closed()
-
-
-class Serve:
- """
- Start a WebSocket server listening on ``host`` and ``port``.
-
- Whenever a client connects, the server creates a
- :class:`WebSocketServerProtocol`, performs the opening handshake, and
- delegates to the connection handler, ``ws_handler``.
-
- The handler receives the :class:`WebSocketServerProtocol` and uses it to
- send and receive messages.
-
- Once the handler completes, either normally or with an exception, the
- server performs the closing handshake and closes the connection.
-
- Awaiting :func:`serve` yields a :class:`WebSocketServer`. This object
- provides a :meth:`~WebSocketServer.close` method to shut down the server::
-
- stop = asyncio.Future() # set this future to exit the server
-
- server = await serve(...)
- await stop
- await server.close()
-
- :func:`serve` can be used as an asynchronous context manager. Then, the
- server is shut down automatically when exiting the context::
-
- stop = asyncio.Future() # set this future to exit the server
-
- async with serve(...):
- await stop
-
- Args:
- ws_handler: Connection handler. It receives the WebSocket connection,
- which is a :class:`WebSocketServerProtocol`, in argument.
- host: Network interfaces the server binds to.
- See :meth:`~asyncio.loop.create_server` for details.
- port: TCP port the server listens on.
- See :meth:`~asyncio.loop.create_server` for details.
- create_protocol: Factory for the :class:`asyncio.Protocol` managing
- the connection. It defaults to :class:`WebSocketServerProtocol`.
- Set it to a wrapper or a subclass to customize connection handling.
- logger: Logger for this server.
- It defaults to ``logging.getLogger("websockets.server")``.
- See the :doc:`logging guide <../../topics/logging>` for details.
- compression: The "permessage-deflate" extension is enabled by default.
- Set ``compression`` to :obj:`None` to disable it. See the
- :doc:`compression guide <../../topics/compression>` for details.
- origins: Acceptable values of the ``Origin`` header, for defending
- against Cross-Site WebSocket Hijacking attacks. Include :obj:`None`
- in the list if the lack of an origin is acceptable.
- extensions: List of supported extensions, in order in which they
- should be negotiated and run.
- subprotocols: List of supported subprotocols, in order of decreasing
- preference.
- extra_headers (Union[HeadersLike, Callable[[str, Headers], HeadersLike]]):
- Arbitrary HTTP headers to add to the response. This can be
- a :data:`~websockets.datastructures.HeadersLike` or a callable
- taking the request path and headers in arguments and returning
- a :data:`~websockets.datastructures.HeadersLike`.
- server_header: Value of the ``Server`` response header.
- It defaults to ``"Python/x.y.z websockets/X.Y"``.
- Setting it to :obj:`None` removes the header.
- process_request (Optional[Callable[[str, Headers], \
- Awaitable[Optional[Tuple[StatusLike, HeadersLike, bytes]]]]]):
- Intercept HTTP request before the opening handshake.
- See :meth:`~WebSocketServerProtocol.process_request` for details.
- select_subprotocol: Select a subprotocol supported by the client.
- See :meth:`~WebSocketServerProtocol.select_subprotocol` for details.
- open_timeout: Timeout for opening connections in seconds.
- :obj:`None` disables the timeout.
-
- See :class:`~websockets.legacy.protocol.WebSocketCommonProtocol` for the
- documentation of ``ping_interval``, ``ping_timeout``, ``close_timeout``,
- ``max_size``, ``max_queue``, ``read_limit``, and ``write_limit``.
-
- Any other keyword arguments are passed the event loop's
- :meth:`~asyncio.loop.create_server` method.
-
- For example:
-
- * You can set ``ssl`` to a :class:`~ssl.SSLContext` to enable TLS.
-
- * You can set ``sock`` to a :obj:`~socket.socket` that you created
- outside of websockets.
-
- Returns:
- WebSocketServer: WebSocket server.
-
- """
-
- def __init__(
- self,
- ws_handler: Union[
- Callable[[WebSocketServerProtocol], Awaitable[Any]],
- Callable[[WebSocketServerProtocol, str], Awaitable[Any]], # deprecated
- ],
- host: Optional[Union[str, Sequence[str]]] = None,
- port: Optional[int] = None,
- *,
- create_protocol: Optional[Callable[..., WebSocketServerProtocol]] = None,
- logger: Optional[LoggerLike] = None,
- compression: Optional[str] = "deflate",
- origins: Optional[Sequence[Optional[Origin]]] = None,
- extensions: Optional[Sequence[ServerExtensionFactory]] = None,
- subprotocols: Optional[Sequence[Subprotocol]] = None,
- extra_headers: Optional[HeadersLikeOrCallable] = None,
- server_header: Optional[str] = USER_AGENT,
- process_request: Optional[
- Callable[[str, Headers], Awaitable[Optional[HTTPResponse]]]
- ] = None,
- select_subprotocol: Optional[
- Callable[[Sequence[Subprotocol], Sequence[Subprotocol]], Subprotocol]
- ] = None,
- open_timeout: Optional[float] = 10,
- ping_interval: Optional[float] = 20,
- ping_timeout: Optional[float] = 20,
- close_timeout: Optional[float] = None,
- max_size: Optional[int] = 2**20,
- max_queue: Optional[int] = 2**5,
- read_limit: int = 2**16,
- write_limit: int = 2**16,
- **kwargs: Any,
- ) -> None:
- # Backwards compatibility: close_timeout used to be called timeout.
- timeout: Optional[float] = kwargs.pop("timeout", None)
- if timeout is None:
- timeout = 10
- else:
- warnings.warn("rename timeout to close_timeout", DeprecationWarning)
- # If both are specified, timeout is ignored.
- if close_timeout is None:
- close_timeout = timeout
-
- # Backwards compatibility: create_protocol used to be called klass.
- klass: Optional[Type[WebSocketServerProtocol]] = kwargs.pop("klass", None)
- if klass is None:
- klass = WebSocketServerProtocol
- else:
- warnings.warn("rename klass to create_protocol", DeprecationWarning)
- # If both are specified, klass is ignored.
- if create_protocol is None:
- create_protocol = klass
-
- # Backwards compatibility: recv() used to return None on closed connections
- legacy_recv: bool = kwargs.pop("legacy_recv", False)
-
- # Backwards compatibility: the loop parameter used to be supported.
- _loop: Optional[asyncio.AbstractEventLoop] = kwargs.pop("loop", None)
- if _loop is None:
- loop = asyncio.get_event_loop()
- else:
- loop = _loop
- warnings.warn("remove loop argument", DeprecationWarning)
-
- ws_server = WebSocketServer(logger=logger)
-
- secure = kwargs.get("ssl") is not None
-
- if compression == "deflate":
- extensions = enable_server_permessage_deflate(extensions)
- elif compression is not None:
- raise ValueError(f"unsupported compression: {compression}")
-
- if subprotocols is not None:
- validate_subprotocols(subprotocols)
-
- factory = functools.partial(
- create_protocol,
- # For backwards compatibility with 10.0 or earlier. Done here in
- # addition to WebSocketServerProtocol to trigger the deprecation
- # warning once per serve() call rather than once per connection.
- remove_path_argument(ws_handler),
- ws_server,
- host=host,
- port=port,
- secure=secure,
- open_timeout=open_timeout,
- ping_interval=ping_interval,
- ping_timeout=ping_timeout,
- close_timeout=close_timeout,
- max_size=max_size,
- max_queue=max_queue,
- read_limit=read_limit,
- write_limit=write_limit,
- loop=_loop,
- legacy_recv=legacy_recv,
- origins=origins,
- extensions=extensions,
- subprotocols=subprotocols,
- extra_headers=extra_headers,
- server_header=server_header,
- process_request=process_request,
- select_subprotocol=select_subprotocol,
- logger=logger,
- )
-
- if kwargs.pop("unix", False):
- path: Optional[str] = kwargs.pop("path", None)
- # unix_serve(path) must not specify host and port parameters.
- assert host is None and port is None
- create_server = functools.partial(
- loop.create_unix_server, factory, path, **kwargs
- )
- else:
- create_server = functools.partial(
- loop.create_server, factory, host, port, **kwargs
- )
-
- # This is a coroutine function.
- self._create_server = create_server
- self.ws_server = ws_server
-
- # async with serve(...)
-
- async def __aenter__(self) -> WebSocketServer:
- return await self
-
- async def __aexit__(
- self,
- exc_type: Optional[Type[BaseException]],
- exc_value: Optional[BaseException],
- traceback: Optional[TracebackType],
- ) -> None:
- self.ws_server.close()
- await self.ws_server.wait_closed()
-
- # await serve(...)
-
- def __await__(self) -> Generator[Any, None, WebSocketServer]:
- # Create a suitable iterator by calling __await__ on a coroutine.
- return self.__await_impl__().__await__()
-
- async def __await_impl__(self) -> WebSocketServer:
- server = await self._create_server()
- self.ws_server.wrap(server)
- return self.ws_server
-
- # yield from serve(...) - remove when dropping Python < 3.10
-
- __iter__ = __await__
-
-
-serve = Serve
-
-
-def unix_serve(
- ws_handler: Union[
- Callable[[WebSocketServerProtocol], Awaitable[Any]],
- Callable[[WebSocketServerProtocol, str], Awaitable[Any]], # deprecated
- ],
- path: Optional[str] = None,
- **kwargs: Any,
-) -> Serve:
- """
- Start a WebSocket server listening on a Unix socket.
-
- This function is identical to :func:`serve`, except the ``host`` and
- ``port`` arguments are replaced by ``path``. It is only available on Unix.
-
- Unrecognized keyword arguments are passed the event loop's
- :meth:`~asyncio.loop.create_unix_server` method.
-
- It's useful for deploying a server behind a reverse proxy such as nginx.
-
- Args:
- path: File system path to the Unix socket.
-
- """
- return serve(ws_handler, path=path, unix=True, **kwargs)
-
-
-def remove_path_argument(
- ws_handler: Union[
- Callable[[WebSocketServerProtocol], Awaitable[Any]],
- Callable[[WebSocketServerProtocol, str], Awaitable[Any]],
- ]
-) -> Callable[[WebSocketServerProtocol], Awaitable[Any]]:
- try:
- inspect.signature(ws_handler).bind(None)
- except TypeError:
- try:
- inspect.signature(ws_handler).bind(None, "")
- except TypeError: # pragma: no cover
- # ws_handler accepts neither one nor two arguments; leave it alone.
- pass
- else:
- # ws_handler accepts two arguments; activate backwards compatibility.
-
- # Enable deprecation warning and announce deprecation in 11.0.
- # warnings.warn("remove second argument of ws_handler", DeprecationWarning)
-
- async def _ws_handler(websocket: WebSocketServerProtocol) -> Any:
- return await cast(
- Callable[[WebSocketServerProtocol, str], Awaitable[Any]],
- ws_handler,
- )(websocket, websocket.path)
-
- return _ws_handler
-
- return cast(
- Callable[[WebSocketServerProtocol], Awaitable[Any]],
- ws_handler,
- )
diff --git a/venv/lib/python3.11/site-packages/websockets/protocol.py b/venv/lib/python3.11/site-packages/websockets/protocol.py
deleted file mode 100644
index 765e6b9..0000000
--- a/venv/lib/python3.11/site-packages/websockets/protocol.py
+++ /dev/null
@@ -1,708 +0,0 @@
-from __future__ import annotations
-
-import enum
-import logging
-import uuid
-from typing import Generator, List, Optional, Type, Union
-
-from .exceptions import (
- ConnectionClosed,
- ConnectionClosedError,
- ConnectionClosedOK,
- InvalidState,
- PayloadTooBig,
- ProtocolError,
-)
-from .extensions import Extension
-from .frames import (
- OK_CLOSE_CODES,
- OP_BINARY,
- OP_CLOSE,
- OP_CONT,
- OP_PING,
- OP_PONG,
- OP_TEXT,
- Close,
- CloseCode,
- Frame,
-)
-from .http11 import Request, Response
-from .streams import StreamReader
-from .typing import LoggerLike, Origin, Subprotocol
-
-
-__all__ = [
- "Protocol",
- "Side",
- "State",
- "SEND_EOF",
-]
-
-Event = Union[Request, Response, Frame]
-"""Events that :meth:`~Protocol.events_received` may return."""
-
-
-class Side(enum.IntEnum):
- """A WebSocket connection is either a server or a client."""
-
- SERVER, CLIENT = range(2)
-
-
-SERVER = Side.SERVER
-CLIENT = Side.CLIENT
-
-
-class State(enum.IntEnum):
- """A WebSocket connection is in one of these four states."""
-
- CONNECTING, OPEN, CLOSING, CLOSED = range(4)
-
-
-CONNECTING = State.CONNECTING
-OPEN = State.OPEN
-CLOSING = State.CLOSING
-CLOSED = State.CLOSED
-
-
-SEND_EOF = b""
-"""Sentinel signaling that the TCP connection must be half-closed."""
-
-
-class Protocol:
- """
- Sans-I/O implementation of a WebSocket connection.
-
- Args:
- side: :attr:`~Side.CLIENT` or :attr:`~Side.SERVER`.
- state: initial state of the WebSocket connection.
- max_size: maximum size of incoming messages in bytes;
- :obj:`None` disables the limit.
- logger: logger for this connection; depending on ``side``,
- defaults to ``logging.getLogger("websockets.client")``
- or ``logging.getLogger("websockets.server")``;
- see the :doc:`logging guide <../../topics/logging>` for details.
-
- """
-
- def __init__(
- self,
- side: Side,
- *,
- state: State = OPEN,
- max_size: Optional[int] = 2**20,
- logger: Optional[LoggerLike] = None,
- ) -> None:
- # Unique identifier. For logs.
- self.id: uuid.UUID = uuid.uuid4()
- """Unique identifier of the connection. Useful in logs."""
-
- # Logger or LoggerAdapter for this connection.
- if logger is None:
- logger = logging.getLogger(f"websockets.{side.name.lower()}")
- self.logger: LoggerLike = logger
- """Logger for this connection."""
-
- # Track if DEBUG is enabled. Shortcut logging calls if it isn't.
- self.debug = logger.isEnabledFor(logging.DEBUG)
-
- # Connection side. CLIENT or SERVER.
- self.side = side
-
- # Connection state. Initially OPEN because subclasses handle CONNECTING.
- self.state = state
-
- # Maximum size of incoming messages in bytes.
- self.max_size = max_size
-
- # Current size of incoming message in bytes. Only set while reading a
- # fragmented message i.e. a data frames with the FIN bit not set.
- self.cur_size: Optional[int] = None
-
- # True while sending a fragmented message i.e. a data frames with the
- # FIN bit not set.
- self.expect_continuation_frame = False
-
- # WebSocket protocol parameters.
- self.origin: Optional[Origin] = None
- self.extensions: List[Extension] = []
- self.subprotocol: Optional[Subprotocol] = None
-
- # Close code and reason, set when a close frame is sent or received.
- self.close_rcvd: Optional[Close] = None
- self.close_sent: Optional[Close] = None
- self.close_rcvd_then_sent: Optional[bool] = None
-
- # Track if an exception happened during the handshake.
- self.handshake_exc: Optional[Exception] = None
- """
- Exception to raise if the opening handshake failed.
-
- :obj:`None` if the opening handshake succeeded.
-
- """
-
- # Track if send_eof() was called.
- self.eof_sent = False
-
- # Parser state.
- self.reader = StreamReader()
- self.events: List[Event] = []
- self.writes: List[bytes] = []
- self.parser = self.parse()
- next(self.parser) # start coroutine
- self.parser_exc: Optional[Exception] = None
-
- @property
- def state(self) -> State:
- """
- WebSocket connection state.
-
- Defined in 4.1, 4.2, 7.1.3, and 7.1.4 of :rfc:`6455`.
-
- """
- return self._state
-
- @state.setter
- def state(self, state: State) -> None:
- if self.debug:
- self.logger.debug("= connection is %s", state.name)
- self._state = state
-
- @property
- def close_code(self) -> Optional[int]:
- """
- `WebSocket close code`_.
-
- .. _WebSocket close code:
- https://www.rfc-editor.org/rfc/rfc6455.html#section-7.1.5
-
- :obj:`None` if the connection isn't closed yet.
-
- """
- if self.state is not CLOSED:
- return None
- elif self.close_rcvd is None:
- return CloseCode.ABNORMAL_CLOSURE
- else:
- return self.close_rcvd.code
-
- @property
- def close_reason(self) -> Optional[str]:
- """
- `WebSocket close reason`_.
-
- .. _WebSocket close reason:
- https://www.rfc-editor.org/rfc/rfc6455.html#section-7.1.6
-
- :obj:`None` if the connection isn't closed yet.
-
- """
- if self.state is not CLOSED:
- return None
- elif self.close_rcvd is None:
- return ""
- else:
- return self.close_rcvd.reason
-
- @property
- def close_exc(self) -> ConnectionClosed:
- """
- Exception to raise when trying to interact with a closed connection.
-
- Don't raise this exception while the connection :attr:`state`
- is :attr:`~websockets.protocol.State.CLOSING`; wait until
- it's :attr:`~websockets.protocol.State.CLOSED`.
-
- Indeed, the exception includes the close code and reason, which are
- known only once the connection is closed.
-
- Raises:
- AssertionError: if the connection isn't closed yet.
-
- """
- assert self.state is CLOSED, "connection isn't closed yet"
- exc_type: Type[ConnectionClosed]
- if (
- self.close_rcvd is not None
- and self.close_sent is not None
- and self.close_rcvd.code in OK_CLOSE_CODES
- and self.close_sent.code in OK_CLOSE_CODES
- ):
- exc_type = ConnectionClosedOK
- else:
- exc_type = ConnectionClosedError
- exc: ConnectionClosed = exc_type(
- self.close_rcvd,
- self.close_sent,
- self.close_rcvd_then_sent,
- )
- # Chain to the exception raised in the parser, if any.
- exc.__cause__ = self.parser_exc
- return exc
-
- # Public methods for receiving data.
-
- def receive_data(self, data: bytes) -> None:
- """
- Receive data from the network.
-
- After calling this method:
-
- - You must call :meth:`data_to_send` and send this data to the network.
- - You should call :meth:`events_received` and process resulting events.
-
- Raises:
- EOFError: if :meth:`receive_eof` was called earlier.
-
- """
- self.reader.feed_data(data)
- next(self.parser)
-
- def receive_eof(self) -> None:
- """
- Receive the end of the data stream from the network.
-
- After calling this method:
-
- - You must call :meth:`data_to_send` and send this data to the network;
- it will return ``[b""]``, signaling the end of the stream, or ``[]``.
- - You aren't expected to call :meth:`events_received`; it won't return
- any new events.
-
- Raises:
- EOFError: if :meth:`receive_eof` was called earlier.
-
- """
- self.reader.feed_eof()
- next(self.parser)
-
- # Public methods for sending events.
-
- def send_continuation(self, data: bytes, fin: bool) -> None:
- """
- Send a `Continuation frame`_.
-
- .. _Continuation frame:
- https://datatracker.ietf.org/doc/html/rfc6455#section-5.6
-
- Parameters:
- data: payload containing the same kind of data
- as the initial frame.
- fin: FIN bit; set it to :obj:`True` if this is the last frame
- of a fragmented message and to :obj:`False` otherwise.
-
- Raises:
- ProtocolError: if a fragmented message isn't in progress.
-
- """
- if not self.expect_continuation_frame:
- raise ProtocolError("unexpected continuation frame")
- self.expect_continuation_frame = not fin
- self.send_frame(Frame(OP_CONT, data, fin))
-
- def send_text(self, data: bytes, fin: bool = True) -> None:
- """
- Send a `Text frame`_.
-
- .. _Text frame:
- https://datatracker.ietf.org/doc/html/rfc6455#section-5.6
-
- Parameters:
- data: payload containing text encoded with UTF-8.
- fin: FIN bit; set it to :obj:`False` if this is the first frame of
- a fragmented message.
-
- Raises:
- ProtocolError: if a fragmented message is in progress.
-
- """
- if self.expect_continuation_frame:
- raise ProtocolError("expected a continuation frame")
- self.expect_continuation_frame = not fin
- self.send_frame(Frame(OP_TEXT, data, fin))
-
- def send_binary(self, data: bytes, fin: bool = True) -> None:
- """
- Send a `Binary frame`_.
-
- .. _Binary frame:
- https://datatracker.ietf.org/doc/html/rfc6455#section-5.6
-
- Parameters:
- data: payload containing arbitrary binary data.
- fin: FIN bit; set it to :obj:`False` if this is the first frame of
- a fragmented message.
-
- Raises:
- ProtocolError: if a fragmented message is in progress.
-
- """
- if self.expect_continuation_frame:
- raise ProtocolError("expected a continuation frame")
- self.expect_continuation_frame = not fin
- self.send_frame(Frame(OP_BINARY, data, fin))
-
- def send_close(self, code: Optional[int] = None, reason: str = "") -> None:
- """
- Send a `Close frame`_.
-
- .. _Close frame:
- https://datatracker.ietf.org/doc/html/rfc6455#section-5.5.1
-
- Parameters:
- code: close code.
- reason: close reason.
-
- Raises:
- ProtocolError: if a fragmented message is being sent, if the code
- isn't valid, or if a reason is provided without a code
-
- """
- if self.expect_continuation_frame:
- raise ProtocolError("expected a continuation frame")
- if code is None:
- if reason != "":
- raise ProtocolError("cannot send a reason without a code")
- close = Close(CloseCode.NO_STATUS_RCVD, "")
- data = b""
- else:
- close = Close(code, reason)
- data = close.serialize()
- # send_frame() guarantees that self.state is OPEN at this point.
- # 7.1.3. The WebSocket Closing Handshake is Started
- self.send_frame(Frame(OP_CLOSE, data))
- self.close_sent = close
- self.state = CLOSING
-
- def send_ping(self, data: bytes) -> None:
- """
- Send a `Ping frame`_.
-
- .. _Ping frame:
- https://datatracker.ietf.org/doc/html/rfc6455#section-5.5.2
-
- Parameters:
- data: payload containing arbitrary binary data.
-
- """
- self.send_frame(Frame(OP_PING, data))
-
- def send_pong(self, data: bytes) -> None:
- """
- Send a `Pong frame`_.
-
- .. _Pong frame:
- https://datatracker.ietf.org/doc/html/rfc6455#section-5.5.3
-
- Parameters:
- data: payload containing arbitrary binary data.
-
- """
- self.send_frame(Frame(OP_PONG, data))
-
- def fail(self, code: int, reason: str = "") -> None:
- """
- `Fail the WebSocket connection`_.
-
- .. _Fail the WebSocket connection:
- https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.7
-
- Parameters:
- code: close code
- reason: close reason
-
- Raises:
- ProtocolError: if the code isn't valid.
- """
- # 7.1.7. Fail the WebSocket Connection
-
- # Send a close frame when the state is OPEN (a close frame was already
- # sent if it's CLOSING), except when failing the connection because
- # of an error reading from or writing to the network.
- if self.state is OPEN:
- if code != CloseCode.ABNORMAL_CLOSURE:
- close = Close(code, reason)
- data = close.serialize()
- self.send_frame(Frame(OP_CLOSE, data))
- self.close_sent = close
- self.state = CLOSING
-
- # When failing the connection, a server closes the TCP connection
- # without waiting for the client to complete the handshake, while a
- # client waits for the server to close the TCP connection, possibly
- # after sending a close frame that the client will ignore.
- if self.side is SERVER and not self.eof_sent:
- self.send_eof()
-
- # 7.1.7. Fail the WebSocket Connection "An endpoint MUST NOT continue
- # to attempt to process data(including a responding Close frame) from
- # the remote endpoint after being instructed to _Fail the WebSocket
- # Connection_."
- self.parser = self.discard()
- next(self.parser) # start coroutine
-
- # Public method for getting incoming events after receiving data.
-
- def events_received(self) -> List[Event]:
- """
- Fetch events generated from data received from the network.
-
- Call this method immediately after any of the ``receive_*()`` methods.
-
- Process resulting events, likely by passing them to the application.
-
- Returns:
- List[Event]: Events read from the connection.
- """
- events, self.events = self.events, []
- return events
-
- # Public method for getting outgoing data after receiving data or sending events.
-
- def data_to_send(self) -> List[bytes]:
- """
- Obtain data to send to the network.
-
- Call this method immediately after any of the ``receive_*()``,
- ``send_*()``, or :meth:`fail` methods.
-
- Write resulting data to the connection.
-
- The empty bytestring :data:`~websockets.protocol.SEND_EOF` signals
- the end of the data stream. When you receive it, half-close the TCP
- connection.
-
- Returns:
- List[bytes]: Data to write to the connection.
-
- """
- writes, self.writes = self.writes, []
- return writes
-
- def close_expected(self) -> bool:
- """
- Tell if the TCP connection is expected to close soon.
-
- Call this method immediately after any of the ``receive_*()``,
- ``send_close()``, or :meth:`fail` methods.
-
- If it returns :obj:`True`, schedule closing the TCP connection after a
- short timeout if the other side hasn't already closed it.
-
- Returns:
- bool: Whether the TCP connection is expected to close soon.
-
- """
- # We expect a TCP close if and only if we sent a close frame:
- # * Normal closure: once we send a close frame, we expect a TCP close:
- # server waits for client to complete the TCP closing handshake;
- # client waits for server to initiate the TCP closing handshake.
- # * Abnormal closure: we always send a close frame and the same logic
- # applies, except on EOFError where we don't send a close frame
- # because we already received the TCP close, so we don't expect it.
- # We already got a TCP Close if and only if the state is CLOSED.
- return self.state is CLOSING or self.handshake_exc is not None
-
- # Private methods for receiving data.
-
- def parse(self) -> Generator[None, None, None]:
- """
- Parse incoming data into frames.
-
- :meth:`receive_data` and :meth:`receive_eof` run this generator
- coroutine until it needs more data or reaches EOF.
-
- :meth:`parse` never raises an exception. Instead, it sets the
- :attr:`parser_exc` and yields control.
-
- """
- try:
- while True:
- if (yield from self.reader.at_eof()):
- if self.debug:
- self.logger.debug("< EOF")
- # If the WebSocket connection is closed cleanly, with a
- # closing handhshake, recv_frame() substitutes parse()
- # with discard(). This branch is reached only when the
- # connection isn't closed cleanly.
- raise EOFError("unexpected end of stream")
-
- if self.max_size is None:
- max_size = None
- elif self.cur_size is None:
- max_size = self.max_size
- else:
- max_size = self.max_size - self.cur_size
-
- # During a normal closure, execution ends here on the next
- # iteration of the loop after receiving a close frame. At
- # this point, recv_frame() replaced parse() by discard().
- frame = yield from Frame.parse(
- self.reader.read_exact,
- mask=self.side is SERVER,
- max_size=max_size,
- extensions=self.extensions,
- )
-
- if self.debug:
- self.logger.debug("< %s", frame)
-
- self.recv_frame(frame)
-
- except ProtocolError as exc:
- self.fail(CloseCode.PROTOCOL_ERROR, str(exc))
- self.parser_exc = exc
-
- except EOFError as exc:
- self.fail(CloseCode.ABNORMAL_CLOSURE, str(exc))
- self.parser_exc = exc
-
- except UnicodeDecodeError as exc:
- self.fail(CloseCode.INVALID_DATA, f"{exc.reason} at position {exc.start}")
- self.parser_exc = exc
-
- except PayloadTooBig as exc:
- self.fail(CloseCode.MESSAGE_TOO_BIG, str(exc))
- self.parser_exc = exc
-
- except Exception as exc:
- self.logger.error("parser failed", exc_info=True)
- # Don't include exception details, which may be security-sensitive.
- self.fail(CloseCode.INTERNAL_ERROR)
- self.parser_exc = exc
-
- # During an abnormal closure, execution ends here after catching an
- # exception. At this point, fail() replaced parse() by discard().
- yield
- raise AssertionError("parse() shouldn't step after error")
-
- def discard(self) -> Generator[None, None, None]:
- """
- Discard incoming data.
-
- This coroutine replaces :meth:`parse`:
-
- - after receiving a close frame, during a normal closure (1.4);
- - after sending a close frame, during an abnormal closure (7.1.7).
-
- """
- # The server close the TCP connection in the same circumstances where
- # discard() replaces parse(). The client closes the connection later,
- # after the server closes the connection or a timeout elapses.
- # (The latter case cannot be handled in this Sans-I/O layer.)
- assert (self.side is SERVER) == (self.eof_sent)
- while not (yield from self.reader.at_eof()):
- self.reader.discard()
- if self.debug:
- self.logger.debug("< EOF")
- # A server closes the TCP connection immediately, while a client
- # waits for the server to close the TCP connection.
- if self.side is CLIENT:
- self.send_eof()
- self.state = CLOSED
- # If discard() completes normally, execution ends here.
- yield
- # Once the reader reaches EOF, its feed_data/eof() methods raise an
- # error, so our receive_data/eof() methods don't step the generator.
- raise AssertionError("discard() shouldn't step after EOF")
-
- def recv_frame(self, frame: Frame) -> None:
- """
- Process an incoming frame.
-
- """
- if frame.opcode is OP_TEXT or frame.opcode is OP_BINARY:
- if self.cur_size is not None:
- raise ProtocolError("expected a continuation frame")
- if frame.fin:
- self.cur_size = None
- else:
- self.cur_size = len(frame.data)
-
- elif frame.opcode is OP_CONT:
- if self.cur_size is None:
- raise ProtocolError("unexpected continuation frame")
- if frame.fin:
- self.cur_size = None
- else:
- self.cur_size += len(frame.data)
-
- elif frame.opcode is OP_PING:
- # 5.5.2. Ping: "Upon receipt of a Ping frame, an endpoint MUST
- # send a Pong frame in response"
- pong_frame = Frame(OP_PONG, frame.data)
- self.send_frame(pong_frame)
-
- elif frame.opcode is OP_PONG:
- # 5.5.3 Pong: "A response to an unsolicited Pong frame is not
- # expected."
- pass
-
- elif frame.opcode is OP_CLOSE:
- # 7.1.5. The WebSocket Connection Close Code
- # 7.1.6. The WebSocket Connection Close Reason
- self.close_rcvd = Close.parse(frame.data)
- if self.state is CLOSING:
- assert self.close_sent is not None
- self.close_rcvd_then_sent = False
-
- if self.cur_size is not None:
- raise ProtocolError("incomplete fragmented message")
-
- # 5.5.1 Close: "If an endpoint receives a Close frame and did
- # not previously send a Close frame, the endpoint MUST send a
- # Close frame in response. (When sending a Close frame in
- # response, the endpoint typically echos the status code it
- # received.)"
-
- if self.state is OPEN:
- # Echo the original data instead of re-serializing it with
- # Close.serialize() because that fails when the close frame
- # is empty and Close.parse() synthesizes a 1005 close code.
- # The rest is identical to send_close().
- self.send_frame(Frame(OP_CLOSE, frame.data))
- self.close_sent = self.close_rcvd
- self.close_rcvd_then_sent = True
- self.state = CLOSING
-
- # 7.1.2. Start the WebSocket Closing Handshake: "Once an
- # endpoint has both sent and received a Close control frame,
- # that endpoint SHOULD _Close the WebSocket Connection_"
-
- # A server closes the TCP connection immediately, while a client
- # waits for the server to close the TCP connection.
- if self.side is SERVER:
- self.send_eof()
-
- # 1.4. Closing Handshake: "after receiving a control frame
- # indicating the connection should be closed, a peer discards
- # any further data received."
- self.parser = self.discard()
- next(self.parser) # start coroutine
-
- else:
- # This can't happen because Frame.parse() validates opcodes.
- raise AssertionError(f"unexpected opcode: {frame.opcode:02x}")
-
- self.events.append(frame)
-
- # Private methods for sending events.
-
- def send_frame(self, frame: Frame) -> None:
- if self.state is not OPEN:
- raise InvalidState(
- f"cannot write to a WebSocket in the {self.state.name} state"
- )
-
- if self.debug:
- self.logger.debug("> %s", frame)
- self.writes.append(
- frame.serialize(mask=self.side is CLIENT, extensions=self.extensions)
- )
-
- def send_eof(self) -> None:
- assert not self.eof_sent
- self.eof_sent = True
- if self.debug:
- self.logger.debug("> EOF")
- self.writes.append(SEND_EOF)
diff --git a/venv/lib/python3.11/site-packages/websockets/py.typed b/venv/lib/python3.11/site-packages/websockets/py.typed
deleted file mode 100644
index e69de29..0000000
--- a/venv/lib/python3.11/site-packages/websockets/py.typed
+++ /dev/null
diff --git a/venv/lib/python3.11/site-packages/websockets/server.py b/venv/lib/python3.11/site-packages/websockets/server.py
deleted file mode 100644
index 1916605..0000000
--- a/venv/lib/python3.11/site-packages/websockets/server.py
+++ /dev/null
@@ -1,580 +0,0 @@
-from __future__ import annotations
-
-import base64
-import binascii
-import email.utils
-import http
-import warnings
-from typing import Any, Callable, Generator, List, Optional, Sequence, Tuple, cast
-
-from .datastructures import Headers, MultipleValuesError
-from .exceptions import (
- InvalidHandshake,
- InvalidHeader,
- InvalidHeaderValue,
- InvalidOrigin,
- InvalidStatus,
- InvalidUpgrade,
- NegotiationError,
-)
-from .extensions import Extension, ServerExtensionFactory
-from .headers import (
- build_extension,
- parse_connection,
- parse_extension,
- parse_subprotocol,
- parse_upgrade,
-)
-from .http11 import Request, Response
-from .protocol import CONNECTING, OPEN, SERVER, Protocol, State
-from .typing import (
- ConnectionOption,
- ExtensionHeader,
- LoggerLike,
- Origin,
- StatusLike,
- Subprotocol,
- UpgradeProtocol,
-)
-from .utils import accept_key
-
-
-# See #940 for why lazy_import isn't used here for backwards compatibility.
-# See #1400 for why listing compatibility imports in __all__ helps PyCharm.
-from .legacy.server import * # isort:skip # noqa: I001
-from .legacy.server import __all__ as legacy__all__
-
-
-__all__ = ["ServerProtocol"] + legacy__all__
-
-
-class ServerProtocol(Protocol):
- """
- Sans-I/O implementation of a WebSocket server connection.
-
- Args:
- origins: acceptable values of the ``Origin`` header; include
- :obj:`None` in the list if the lack of an origin is acceptable.
- This is useful for defending against Cross-Site WebSocket
- Hijacking attacks.
- extensions: list of supported extensions, in order in which they
- should be tried.
- subprotocols: list of supported subprotocols, in order of decreasing
- preference.
- select_subprotocol: Callback for selecting a subprotocol among
- those supported by the client and the server. It has the same
- signature as the :meth:`select_subprotocol` method, including a
- :class:`ServerProtocol` instance as first argument.
- state: initial state of the WebSocket connection.
- max_size: maximum size of incoming messages in bytes;
- :obj:`None` disables the limit.
- logger: logger for this connection;
- defaults to ``logging.getLogger("websockets.client")``;
- see the :doc:`logging guide <../../topics/logging>` for details.
-
- """
-
- def __init__(
- self,
- *,
- origins: Optional[Sequence[Optional[Origin]]] = None,
- extensions: Optional[Sequence[ServerExtensionFactory]] = None,
- subprotocols: Optional[Sequence[Subprotocol]] = None,
- select_subprotocol: Optional[
- Callable[
- [ServerProtocol, Sequence[Subprotocol]],
- Optional[Subprotocol],
- ]
- ] = None,
- state: State = CONNECTING,
- max_size: Optional[int] = 2**20,
- logger: Optional[LoggerLike] = None,
- ):
- super().__init__(
- side=SERVER,
- state=state,
- max_size=max_size,
- logger=logger,
- )
- self.origins = origins
- self.available_extensions = extensions
- self.available_subprotocols = subprotocols
- if select_subprotocol is not None:
- # Bind select_subprotocol then shadow self.select_subprotocol.
- # Use setattr to work around https://github.com/python/mypy/issues/2427.
- setattr(
- self,
- "select_subprotocol",
- select_subprotocol.__get__(self, self.__class__),
- )
-
- def accept(self, request: Request) -> Response:
- """
- Create a handshake response to accept the connection.
-
- If the connection cannot be established, the handshake response
- actually rejects the handshake.
-
- You must send the handshake response with :meth:`send_response`.
-
- You may modify it before sending it, for example to add HTTP headers.
-
- Args:
- request: WebSocket handshake request event received from the client.
-
- Returns:
- WebSocket handshake response event to send to the client.
-
- """
- try:
- (
- accept_header,
- extensions_header,
- protocol_header,
- ) = self.process_request(request)
- except InvalidOrigin as exc:
- request._exception = exc
- self.handshake_exc = exc
- if self.debug:
- self.logger.debug("! invalid origin", exc_info=True)
- return self.reject(
- http.HTTPStatus.FORBIDDEN,
- f"Failed to open a WebSocket connection: {exc}.\n",
- )
- except InvalidUpgrade as exc:
- request._exception = exc
- self.handshake_exc = exc
- if self.debug:
- self.logger.debug("! invalid upgrade", exc_info=True)
- response = self.reject(
- http.HTTPStatus.UPGRADE_REQUIRED,
- (
- f"Failed to open a WebSocket connection: {exc}.\n"
- f"\n"
- f"You cannot access a WebSocket server directly "
- f"with a browser. You need a WebSocket client.\n"
- ),
- )
- response.headers["Upgrade"] = "websocket"
- return response
- except InvalidHandshake as exc:
- request._exception = exc
- self.handshake_exc = exc
- if self.debug:
- self.logger.debug("! invalid handshake", exc_info=True)
- return self.reject(
- http.HTTPStatus.BAD_REQUEST,
- f"Failed to open a WebSocket connection: {exc}.\n",
- )
- except Exception as exc:
- # Handle exceptions raised by user-provided select_subprotocol and
- # unexpected errors.
- request._exception = exc
- self.handshake_exc = exc
- self.logger.error("opening handshake failed", exc_info=True)
- return self.reject(
- http.HTTPStatus.INTERNAL_SERVER_ERROR,
- (
- "Failed to open a WebSocket connection.\n"
- "See server log for more information.\n"
- ),
- )
-
- headers = Headers()
-
- headers["Date"] = email.utils.formatdate(usegmt=True)
-
- headers["Upgrade"] = "websocket"
- headers["Connection"] = "Upgrade"
- headers["Sec-WebSocket-Accept"] = accept_header
-
- if extensions_header is not None:
- headers["Sec-WebSocket-Extensions"] = extensions_header
-
- if protocol_header is not None:
- headers["Sec-WebSocket-Protocol"] = protocol_header
-
- self.logger.info("connection open")
- return Response(101, "Switching Protocols", headers)
-
- def process_request(
- self,
- request: Request,
- ) -> Tuple[str, Optional[str], Optional[str]]:
- """
- Check a handshake request and negotiate extensions and subprotocol.
-
- This function doesn't verify that the request is an HTTP/1.1 or higher
- GET request and doesn't check the ``Host`` header. These controls are
- usually performed earlier in the HTTP request handling code. They're
- the responsibility of the caller.
-
- Args:
- request: WebSocket handshake request received from the client.
-
- Returns:
- Tuple[str, Optional[str], Optional[str]]:
- ``Sec-WebSocket-Accept``, ``Sec-WebSocket-Extensions``, and
- ``Sec-WebSocket-Protocol`` headers for the handshake response.
-
- Raises:
- InvalidHandshake: if the handshake request is invalid;
- then the server must return 400 Bad Request error.
-
- """
- headers = request.headers
-
- connection: List[ConnectionOption] = sum(
- [parse_connection(value) for value in headers.get_all("Connection")], []
- )
-
- if not any(value.lower() == "upgrade" for value in connection):
- raise InvalidUpgrade(
- "Connection", ", ".join(connection) if connection else None
- )
-
- upgrade: List[UpgradeProtocol] = sum(
- [parse_upgrade(value) for value in headers.get_all("Upgrade")], []
- )
-
- # For compatibility with non-strict implementations, ignore case when
- # checking the Upgrade header. The RFC always uses "websocket", except
- # in section 11.2. (IANA registration) where it uses "WebSocket".
- if not (len(upgrade) == 1 and upgrade[0].lower() == "websocket"):
- raise InvalidUpgrade("Upgrade", ", ".join(upgrade) if upgrade else None)
-
- try:
- key = headers["Sec-WebSocket-Key"]
- except KeyError as exc:
- raise InvalidHeader("Sec-WebSocket-Key") from exc
- except MultipleValuesError as exc:
- raise InvalidHeader(
- "Sec-WebSocket-Key", "more than one Sec-WebSocket-Key header found"
- ) from exc
-
- try:
- raw_key = base64.b64decode(key.encode(), validate=True)
- except binascii.Error as exc:
- raise InvalidHeaderValue("Sec-WebSocket-Key", key) from exc
- if len(raw_key) != 16:
- raise InvalidHeaderValue("Sec-WebSocket-Key", key)
-
- try:
- version = headers["Sec-WebSocket-Version"]
- except KeyError as exc:
- raise InvalidHeader("Sec-WebSocket-Version") from exc
- except MultipleValuesError as exc:
- raise InvalidHeader(
- "Sec-WebSocket-Version",
- "more than one Sec-WebSocket-Version header found",
- ) from exc
-
- if version != "13":
- raise InvalidHeaderValue("Sec-WebSocket-Version", version)
-
- accept_header = accept_key(key)
-
- self.origin = self.process_origin(headers)
-
- extensions_header, self.extensions = self.process_extensions(headers)
-
- protocol_header = self.subprotocol = self.process_subprotocol(headers)
-
- return (
- accept_header,
- extensions_header,
- protocol_header,
- )
-
- def process_origin(self, headers: Headers) -> Optional[Origin]:
- """
- Handle the Origin HTTP request header.
-
- Args:
- headers: WebSocket handshake request headers.
-
- Returns:
- Optional[Origin]: origin, if it is acceptable.
-
- Raises:
- InvalidHandshake: if the Origin header is invalid.
- InvalidOrigin: if the origin isn't acceptable.
-
- """
- # "The user agent MUST NOT include more than one Origin header field"
- # per https://www.rfc-editor.org/rfc/rfc6454.html#section-7.3.
- try:
- origin = cast(Optional[Origin], headers.get("Origin"))
- except MultipleValuesError as exc:
- raise InvalidHeader("Origin", "more than one Origin header found") from exc
- if self.origins is not None:
- if origin not in self.origins:
- raise InvalidOrigin(origin)
- return origin
-
- def process_extensions(
- self,
- headers: Headers,
- ) -> Tuple[Optional[str], List[Extension]]:
- """
- Handle the Sec-WebSocket-Extensions HTTP request header.
-
- Accept or reject each extension proposed in the client request.
- Negotiate parameters for accepted extensions.
-
- Per :rfc:`6455`, negotiation rules are defined by the specification of
- each extension.
-
- To provide this level of flexibility, for each extension proposed by
- the client, we check for a match with each extension available in the
- server configuration. If no match is found, the extension is ignored.
-
- If several variants of the same extension are proposed by the client,
- it may be accepted several times, which won't make sense in general.
- Extensions must implement their own requirements. For this purpose,
- the list of previously accepted extensions is provided.
-
- This process doesn't allow the server to reorder extensions. It can
- only select a subset of the extensions proposed by the client.
-
- Other requirements, for example related to mandatory extensions or the
- order of extensions, may be implemented by overriding this method.
-
- Args:
- headers: WebSocket handshake request headers.
-
- Returns:
- Tuple[Optional[str], List[Extension]]: ``Sec-WebSocket-Extensions``
- HTTP response header and list of accepted extensions.
-
- Raises:
- InvalidHandshake: if the Sec-WebSocket-Extensions header is invalid.
-
- """
- response_header_value: Optional[str] = None
-
- extension_headers: List[ExtensionHeader] = []
- accepted_extensions: List[Extension] = []
-
- header_values = headers.get_all("Sec-WebSocket-Extensions")
-
- if header_values and self.available_extensions:
- parsed_header_values: List[ExtensionHeader] = sum(
- [parse_extension(header_value) for header_value in header_values], []
- )
-
- for name, request_params in parsed_header_values:
- for ext_factory in self.available_extensions:
- # Skip non-matching extensions based on their name.
- if ext_factory.name != name:
- continue
-
- # Skip non-matching extensions based on their params.
- try:
- response_params, extension = ext_factory.process_request_params(
- request_params, accepted_extensions
- )
- except NegotiationError:
- continue
-
- # Add matching extension to the final list.
- extension_headers.append((name, response_params))
- accepted_extensions.append(extension)
-
- # Break out of the loop once we have a match.
- break
-
- # If we didn't break from the loop, no extension in our list
- # matched what the client sent. The extension is declined.
-
- # Serialize extension header.
- if extension_headers:
- response_header_value = build_extension(extension_headers)
-
- return response_header_value, accepted_extensions
-
- def process_subprotocol(self, headers: Headers) -> Optional[Subprotocol]:
- """
- Handle the Sec-WebSocket-Protocol HTTP request header.
-
- Args:
- headers: WebSocket handshake request headers.
-
- Returns:
- Optional[Subprotocol]: Subprotocol, if one was selected; this is
- also the value of the ``Sec-WebSocket-Protocol`` response header.
-
- Raises:
- InvalidHandshake: if the Sec-WebSocket-Subprotocol header is invalid.
-
- """
- subprotocols: Sequence[Subprotocol] = sum(
- [
- parse_subprotocol(header_value)
- for header_value in headers.get_all("Sec-WebSocket-Protocol")
- ],
- [],
- )
-
- return self.select_subprotocol(subprotocols)
-
- def select_subprotocol(
- self,
- subprotocols: Sequence[Subprotocol],
- ) -> Optional[Subprotocol]:
- """
- Pick a subprotocol among those offered by the client.
-
- If several subprotocols are supported by both the client and the server,
- pick the first one in the list declared the server.
-
- If the server doesn't support any subprotocols, continue without a
- subprotocol, regardless of what the client offers.
-
- If the server supports at least one subprotocol and the client doesn't
- offer any, abort the handshake with an HTTP 400 error.
-
- You provide a ``select_subprotocol`` argument to :class:`ServerProtocol`
- to override this logic. For example, you could accept the connection
- even if client doesn't offer a subprotocol, rather than reject it.
-
- Here's how to negotiate the ``chat`` subprotocol if the client supports
- it and continue without a subprotocol otherwise::
-
- def select_subprotocol(protocol, subprotocols):
- if "chat" in subprotocols:
- return "chat"
-
- Args:
- subprotocols: list of subprotocols offered by the client.
-
- Returns:
- Optional[Subprotocol]: Selected subprotocol, if a common subprotocol
- was found.
-
- :obj:`None` to continue without a subprotocol.
-
- Raises:
- NegotiationError: custom implementations may raise this exception
- to abort the handshake with an HTTP 400 error.
-
- """
- # Server doesn't offer any subprotocols.
- if not self.available_subprotocols: # None or empty list
- return None
-
- # Server offers at least one subprotocol but client doesn't offer any.
- if not subprotocols:
- raise NegotiationError("missing subprotocol")
-
- # Server and client both offer subprotocols. Look for a shared one.
- proposed_subprotocols = set(subprotocols)
- for subprotocol in self.available_subprotocols:
- if subprotocol in proposed_subprotocols:
- return subprotocol
-
- # No common subprotocol was found.
- raise NegotiationError(
- "invalid subprotocol; expected one of "
- + ", ".join(self.available_subprotocols)
- )
-
- def reject(
- self,
- status: StatusLike,
- text: str,
- ) -> Response:
- """
- Create a handshake response to reject the connection.
-
- A short plain text response is the best fallback when failing to
- establish a WebSocket connection.
-
- You must send the handshake response with :meth:`send_response`.
-
- You can modify it before sending it, for example to alter HTTP headers.
-
- Args:
- status: HTTP status code.
- text: HTTP response body; will be encoded to UTF-8.
-
- Returns:
- Response: WebSocket handshake response event to send to the client.
-
- """
- # If a user passes an int instead of a HTTPStatus, fix it automatically.
- status = http.HTTPStatus(status)
- body = text.encode()
- headers = Headers(
- [
- ("Date", email.utils.formatdate(usegmt=True)),
- ("Connection", "close"),
- ("Content-Length", str(len(body))),
- ("Content-Type", "text/plain; charset=utf-8"),
- ]
- )
- response = Response(status.value, status.phrase, headers, body)
- # When reject() is called from accept(), handshake_exc is already set.
- # If a user calls reject(), set handshake_exc to guarantee invariant:
- # "handshake_exc is None if and only if opening handshake succeeded."
- if self.handshake_exc is None:
- self.handshake_exc = InvalidStatus(response)
- self.logger.info("connection rejected (%d %s)", status.value, status.phrase)
- return response
-
- def send_response(self, response: Response) -> None:
- """
- Send a handshake response to the client.
-
- Args:
- response: WebSocket handshake response event to send.
-
- """
- if self.debug:
- code, phrase = response.status_code, response.reason_phrase
- self.logger.debug("> HTTP/1.1 %d %s", code, phrase)
- for key, value in response.headers.raw_items():
- self.logger.debug("> %s: %s", key, value)
- if response.body is not None:
- self.logger.debug("> [body] (%d bytes)", len(response.body))
-
- self.writes.append(response.serialize())
-
- if response.status_code == 101:
- assert self.state is CONNECTING
- self.state = OPEN
- else:
- self.send_eof()
- self.parser = self.discard()
- next(self.parser) # start coroutine
-
- def parse(self) -> Generator[None, None, None]:
- if self.state is CONNECTING:
- try:
- request = yield from Request.parse(
- self.reader.read_line,
- )
- except Exception as exc:
- self.handshake_exc = exc
- self.send_eof()
- self.parser = self.discard()
- next(self.parser) # start coroutine
- yield
-
- if self.debug:
- self.logger.debug("< GET %s HTTP/1.1", request.path)
- for key, value in request.headers.raw_items():
- self.logger.debug("< %s: %s", key, value)
-
- self.events.append(request)
-
- yield from super().parse()
-
-
-class ServerConnection(ServerProtocol):
- def __init__(self, *args: Any, **kwargs: Any) -> None:
- warnings.warn(
- "ServerConnection was renamed to ServerProtocol",
- DeprecationWarning,
- )
- super().__init__(*args, **kwargs)
diff --git a/venv/lib/python3.11/site-packages/websockets/speedups.c b/venv/lib/python3.11/site-packages/websockets/speedups.c
deleted file mode 100644
index a195904..0000000
--- a/venv/lib/python3.11/site-packages/websockets/speedups.c
+++ /dev/null
@@ -1,223 +0,0 @@
-/* C implementation of performance sensitive functions. */
-
-#define PY_SSIZE_T_CLEAN
-#include <Python.h>
-#include <stdint.h> /* uint8_t, uint32_t, uint64_t */
-
-#if __ARM_NEON
-#include <arm_neon.h>
-#elif __SSE2__
-#include <emmintrin.h>
-#endif
-
-static const Py_ssize_t MASK_LEN = 4;
-
-/* Similar to PyBytes_AsStringAndSize, but accepts more types */
-
-static int
-_PyBytesLike_AsStringAndSize(PyObject *obj, PyObject **tmp, char **buffer, Py_ssize_t *length)
-{
- // This supports bytes, bytearrays, and memoryview objects,
- // which are common data structures for handling byte streams.
- // websockets.framing.prepare_data() returns only these types.
- // If *tmp isn't NULL, the caller gets a new reference.
- if (PyBytes_Check(obj))
- {
- *tmp = NULL;
- *buffer = PyBytes_AS_STRING(obj);
- *length = PyBytes_GET_SIZE(obj);
- }
- else if (PyByteArray_Check(obj))
- {
- *tmp = NULL;
- *buffer = PyByteArray_AS_STRING(obj);
- *length = PyByteArray_GET_SIZE(obj);
- }
- else if (PyMemoryView_Check(obj))
- {
- *tmp = PyMemoryView_GetContiguous(obj, PyBUF_READ, 'C');
- if (*tmp == NULL)
- {
- return -1;
- }
- Py_buffer *mv_buf;
- mv_buf = PyMemoryView_GET_BUFFER(*tmp);
- *buffer = mv_buf->buf;
- *length = mv_buf->len;
- }
- else
- {
- PyErr_Format(
- PyExc_TypeError,
- "expected a bytes-like object, %.200s found",
- Py_TYPE(obj)->tp_name);
- return -1;
- }
-
- return 0;
-}
-
-/* C implementation of websockets.utils.apply_mask */
-
-static PyObject *
-apply_mask(PyObject *self, PyObject *args, PyObject *kwds)
-{
-
- // In order to support various bytes-like types, accept any Python object.
-
- static char *kwlist[] = {"data", "mask", NULL};
- PyObject *input_obj;
- PyObject *mask_obj;
-
- // A pointer to a char * + length will be extracted from the data and mask
- // arguments, possibly via a Py_buffer.
-
- PyObject *input_tmp = NULL;
- char *input;
- Py_ssize_t input_len;
- PyObject *mask_tmp = NULL;
- char *mask;
- Py_ssize_t mask_len;
-
- // Initialize a PyBytesObject then get a pointer to the underlying char *
- // in order to avoid an extra memory copy in PyBytes_FromStringAndSize.
-
- PyObject *result = NULL;
- char *output;
-
- // Other variables.
-
- Py_ssize_t i = 0;
-
- // Parse inputs.
-
- if (!PyArg_ParseTupleAndKeywords(
- args, kwds, "OO", kwlist, &input_obj, &mask_obj))
- {
- goto exit;
- }
-
- if (_PyBytesLike_AsStringAndSize(input_obj, &input_tmp, &input, &input_len) == -1)
- {
- goto exit;
- }
-
- if (_PyBytesLike_AsStringAndSize(mask_obj, &mask_tmp, &mask, &mask_len) == -1)
- {
- goto exit;
- }
-
- if (mask_len != MASK_LEN)
- {
- PyErr_SetString(PyExc_ValueError, "mask must contain 4 bytes");
- goto exit;
- }
-
- // Create output.
-
- result = PyBytes_FromStringAndSize(NULL, input_len);
- if (result == NULL)
- {
- goto exit;
- }
-
- // Since we just created result, we don't need error checks.
- output = PyBytes_AS_STRING(result);
-
- // Perform the masking operation.
-
- // Apparently GCC cannot figure out the following optimizations by itself.
-
- // We need a new scope for MSVC 2010 (non C99 friendly)
- {
-#if __ARM_NEON
-
- // With NEON support, XOR by blocks of 16 bytes = 128 bits.
-
- Py_ssize_t input_len_128 = input_len & ~15;
- uint8x16_t mask_128 = vreinterpretq_u8_u32(vdupq_n_u32(*(uint32_t *)mask));
-
- for (; i < input_len_128; i += 16)
- {
- uint8x16_t in_128 = vld1q_u8((uint8_t *)(input + i));
- uint8x16_t out_128 = veorq_u8(in_128, mask_128);
- vst1q_u8((uint8_t *)(output + i), out_128);
- }
-
-#elif __SSE2__
-
- // With SSE2 support, XOR by blocks of 16 bytes = 128 bits.
-
- // Since we cannot control the 16-bytes alignment of input and output
- // buffers, we rely on loadu/storeu rather than load/store.
-
- Py_ssize_t input_len_128 = input_len & ~15;
- __m128i mask_128 = _mm_set1_epi32(*(uint32_t *)mask);
-
- for (; i < input_len_128; i += 16)
- {
- __m128i in_128 = _mm_loadu_si128((__m128i *)(input + i));
- __m128i out_128 = _mm_xor_si128(in_128, mask_128);
- _mm_storeu_si128((__m128i *)(output + i), out_128);
- }
-
-#else
-
- // Without SSE2 support, XOR by blocks of 8 bytes = 64 bits.
-
- // We assume the memory allocator aligns everything on 8 bytes boundaries.
-
- Py_ssize_t input_len_64 = input_len & ~7;
- uint32_t mask_32 = *(uint32_t *)mask;
- uint64_t mask_64 = ((uint64_t)mask_32 << 32) | (uint64_t)mask_32;
-
- for (; i < input_len_64; i += 8)
- {
- *(uint64_t *)(output + i) = *(uint64_t *)(input + i) ^ mask_64;
- }
-
-#endif
- }
-
- // XOR the remainder of the input byte by byte.
-
- for (; i < input_len; i++)
- {
- output[i] = input[i] ^ mask[i & (MASK_LEN - 1)];
- }
-
-exit:
- Py_XDECREF(input_tmp);
- Py_XDECREF(mask_tmp);
- return result;
-
-}
-
-static PyMethodDef speedups_methods[] = {
- {
- "apply_mask",
- (PyCFunction)apply_mask,
- METH_VARARGS | METH_KEYWORDS,
- "Apply masking to the data of a WebSocket message.",
- },
- {NULL, NULL, 0, NULL}, /* Sentinel */
-};
-
-static struct PyModuleDef speedups_module = {
- PyModuleDef_HEAD_INIT,
- "websocket.speedups", /* m_name */
- "C implementation of performance sensitive functions.",
- /* m_doc */
- -1, /* m_size */
- speedups_methods, /* m_methods */
- NULL,
- NULL,
- NULL,
- NULL
-};
-
-PyMODINIT_FUNC
-PyInit_speedups(void)
-{
- return PyModule_Create(&speedups_module);
-}
diff --git a/venv/lib/python3.11/site-packages/websockets/speedups.cpython-311-x86_64-linux-gnu.so b/venv/lib/python3.11/site-packages/websockets/speedups.cpython-311-x86_64-linux-gnu.so
deleted file mode 100755
index 6a15c5d..0000000
--- a/venv/lib/python3.11/site-packages/websockets/speedups.cpython-311-x86_64-linux-gnu.so
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/streams.py b/venv/lib/python3.11/site-packages/websockets/streams.py
deleted file mode 100644
index f861d4b..0000000
--- a/venv/lib/python3.11/site-packages/websockets/streams.py
+++ /dev/null
@@ -1,151 +0,0 @@
-from __future__ import annotations
-
-from typing import Generator
-
-
-class StreamReader:
- """
- Generator-based stream reader.
-
- This class doesn't support concurrent calls to :meth:`read_line`,
- :meth:`read_exact`, or :meth:`read_to_eof`. Make sure calls are
- serialized.
-
- """
-
- def __init__(self) -> None:
- self.buffer = bytearray()
- self.eof = False
-
- def read_line(self, m: int) -> Generator[None, None, bytes]:
- """
- Read a LF-terminated line from the stream.
-
- This is a generator-based coroutine.
-
- The return value includes the LF character.
-
- Args:
- m: maximum number bytes to read; this is a security limit.
-
- Raises:
- EOFError: if the stream ends without a LF.
- RuntimeError: if the stream ends in more than ``m`` bytes.
-
- """
- n = 0 # number of bytes to read
- p = 0 # number of bytes without a newline
- while True:
- n = self.buffer.find(b"\n", p) + 1
- if n > 0:
- break
- p = len(self.buffer)
- if p > m:
- raise RuntimeError(f"read {p} bytes, expected no more than {m} bytes")
- if self.eof:
- raise EOFError(f"stream ends after {p} bytes, before end of line")
- yield
- if n > m:
- raise RuntimeError(f"read {n} bytes, expected no more than {m} bytes")
- r = self.buffer[:n]
- del self.buffer[:n]
- return r
-
- def read_exact(self, n: int) -> Generator[None, None, bytes]:
- """
- Read a given number of bytes from the stream.
-
- This is a generator-based coroutine.
-
- Args:
- n: how many bytes to read.
-
- Raises:
- EOFError: if the stream ends in less than ``n`` bytes.
-
- """
- assert n >= 0
- while len(self.buffer) < n:
- if self.eof:
- p = len(self.buffer)
- raise EOFError(f"stream ends after {p} bytes, expected {n} bytes")
- yield
- r = self.buffer[:n]
- del self.buffer[:n]
- return r
-
- def read_to_eof(self, m: int) -> Generator[None, None, bytes]:
- """
- Read all bytes from the stream.
-
- This is a generator-based coroutine.
-
- Args:
- m: maximum number bytes to read; this is a security limit.
-
- Raises:
- RuntimeError: if the stream ends in more than ``m`` bytes.
-
- """
- while not self.eof:
- p = len(self.buffer)
- if p > m:
- raise RuntimeError(f"read {p} bytes, expected no more than {m} bytes")
- yield
- r = self.buffer[:]
- del self.buffer[:]
- return r
-
- def at_eof(self) -> Generator[None, None, bool]:
- """
- Tell whether the stream has ended and all data was read.
-
- This is a generator-based coroutine.
-
- """
- while True:
- if self.buffer:
- return False
- if self.eof:
- return True
- # When all data was read but the stream hasn't ended, we can't
- # tell if until either feed_data() or feed_eof() is called.
- yield
-
- def feed_data(self, data: bytes) -> None:
- """
- Write data to the stream.
-
- :meth:`feed_data` cannot be called after :meth:`feed_eof`.
-
- Args:
- data: data to write.
-
- Raises:
- EOFError: if the stream has ended.
-
- """
- if self.eof:
- raise EOFError("stream ended")
- self.buffer += data
-
- def feed_eof(self) -> None:
- """
- End the stream.
-
- :meth:`feed_eof` cannot be called more than once.
-
- Raises:
- EOFError: if the stream has ended.
-
- """
- if self.eof:
- raise EOFError("stream ended")
- self.eof = True
-
- def discard(self) -> None:
- """
- Discard all buffered data, but don't end the stream.
-
- """
- del self.buffer[:]
diff --git a/venv/lib/python3.11/site-packages/websockets/sync/__init__.py b/venv/lib/python3.11/site-packages/websockets/sync/__init__.py
deleted file mode 100644
index e69de29..0000000
--- a/venv/lib/python3.11/site-packages/websockets/sync/__init__.py
+++ /dev/null
diff --git a/venv/lib/python3.11/site-packages/websockets/sync/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/sync/__pycache__/__init__.cpython-311.pyc
deleted file mode 100644
index 7d70e67..0000000
--- a/venv/lib/python3.11/site-packages/websockets/sync/__pycache__/__init__.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/sync/__pycache__/client.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/sync/__pycache__/client.cpython-311.pyc
deleted file mode 100644
index 79bd716..0000000
--- a/venv/lib/python3.11/site-packages/websockets/sync/__pycache__/client.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/sync/__pycache__/connection.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/sync/__pycache__/connection.cpython-311.pyc
deleted file mode 100644
index ae821c1..0000000
--- a/venv/lib/python3.11/site-packages/websockets/sync/__pycache__/connection.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/sync/__pycache__/messages.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/sync/__pycache__/messages.cpython-311.pyc
deleted file mode 100644
index e9aadf8..0000000
--- a/venv/lib/python3.11/site-packages/websockets/sync/__pycache__/messages.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/sync/__pycache__/server.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/sync/__pycache__/server.cpython-311.pyc
deleted file mode 100644
index 4d8d13f..0000000
--- a/venv/lib/python3.11/site-packages/websockets/sync/__pycache__/server.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/sync/__pycache__/utils.cpython-311.pyc b/venv/lib/python3.11/site-packages/websockets/sync/__pycache__/utils.cpython-311.pyc
deleted file mode 100644
index 683704e..0000000
--- a/venv/lib/python3.11/site-packages/websockets/sync/__pycache__/utils.cpython-311.pyc
+++ /dev/null
Binary files differ
diff --git a/venv/lib/python3.11/site-packages/websockets/sync/client.py b/venv/lib/python3.11/site-packages/websockets/sync/client.py
deleted file mode 100644
index 087ff5f..0000000
--- a/venv/lib/python3.11/site-packages/websockets/sync/client.py
+++ /dev/null
@@ -1,328 +0,0 @@
-from __future__ import annotations
-
-import socket
-import ssl
-import threading
-from typing import Any, Optional, Sequence, Type
-
-from ..client import ClientProtocol
-from ..datastructures import HeadersLike
-from ..extensions.base import ClientExtensionFactory
-from ..extensions.permessage_deflate import enable_client_permessage_deflate
-from ..headers import validate_subprotocols
-from ..http import USER_AGENT
-from ..http11 import Response
-from ..protocol import CONNECTING, OPEN, Event
-from ..typing import LoggerLike, Origin, Subprotocol
-from ..uri import parse_uri
-from .connection import Connection
-from .utils import Deadline
-
-
-__all__ = ["connect", "unix_connect", "ClientConnection"]
-
-
-class ClientConnection(Connection):
- """
- Threaded implementation of a WebSocket client connection.
-
- :class:`ClientConnection` provides :meth:`recv` and :meth:`send` methods for
- receiving and sending messages.
-
- It supports iteration to receive messages::
-
- for message in websocket:
- process(message)
-
- The iterator exits normally when the connection is closed with close code
- 1000 (OK) or 1001 (going away) or without a close code. It raises a
- :exc:`~websockets.exceptions.ConnectionClosedError` when the connection is
- closed with any other code.
-
- Args:
- socket: Socket connected to a WebSocket server.
- protocol: Sans-I/O connection.
- close_timeout: Timeout for closing the connection in seconds.
-
- """
-
- def __init__(
- self,
- socket: socket.socket,
- protocol: ClientProtocol,
- *,
- close_timeout: Optional[float] = 10,
- ) -> None:
- self.protocol: ClientProtocol
- self.response_rcvd = threading.Event()
- super().__init__(
- socket,
- protocol,
- close_timeout=close_timeout,
- )
-
- def handshake(
- self,
- additional_headers: Optional[HeadersLike] = None,
- user_agent_header: Optional[str] = USER_AGENT,
- timeout: Optional[float] = None,
- ) -> None:
- """
- Perform the opening handshake.
-
- """
- with self.send_context(expected_state=CONNECTING):
- self.request = self.protocol.connect()
- if additional_headers is not None:
- self.request.headers.update(additional_headers)
- if user_agent_header is not None:
- self.request.headers["User-Agent"] = user_agent_header
- self.protocol.send_request(self.request)
-
- if not self.response_rcvd.wait(timeout):
- self.close_socket()
- self.recv_events_thread.join()
- raise TimeoutError("timed out during handshake")
-
- if self.response is None:
- self.close_socket()
- self.recv_events_thread.join()
- raise ConnectionError("connection closed during handshake")
-
- if self.protocol.state is not OPEN:
- self.recv_events_thread.join(self.close_timeout)
- self.close_socket()
- self.recv_events_thread.join()
-
- if self.protocol.handshake_exc is not None:
- raise self.protocol.handshake_exc
-
- def process_event(self, event: Event) -> None:
- """
- Process one incoming event.
-
- """
- # First event - handshake response.
- if self.response is None:
- assert isinstance(event, Response)
- self.response = event
- self.response_rcvd.set()
- # Later events - frames.
- else:
- super().process_event(event)
-
- def recv_events(self) -> None:
- """
- Read incoming data from the socket and process events.
-
- """
- try:
- super().recv_events()
- finally:
- # If the connection is closed during the handshake, unblock it.
- self.response_rcvd.set()
-
-
-def connect(
- uri: str,
- *,
- # TCP/TLS — unix and path are only for unix_connect()
- sock: Optional[socket.socket] = None,
- ssl_context: Optional[ssl.SSLContext] = None,
- server_hostname: Optional[str] = None,
- unix: bool = False,
- path: Optional[str] = None,
- # WebSocket
- origin: Optional[Origin] = None,
- extensions: Optional[Sequence[ClientExtensionFactory]] = None,
- subprotocols: Optional[Sequence[Subprotocol]] = None,
- additional_headers: Optional[HeadersLike] = None,
- user_agent_header: Optional[str] = USER_AGENT,
- compression: Optional[str] = "deflate",
- # Timeouts
- open_timeout: Optional[float] = 10,
- close_timeout: Optional[float] = 10,
- # Limits
- max_size: Optional[int] = 2**20,
- # Logging
- logger: Optional[LoggerLike] = None,
- # Escape hatch for advanced customization
- create_connection: Optional[Type[ClientConnection]] = None,
-) -> ClientConnection:
- """
- Connect to the WebSocket server at ``uri``.
-
- This function returns a :class:`ClientConnection` instance, which you can
- use to send and receive messages.
-
- :func:`connect` may be used as a context manager::
-
- async with websockets.sync.client.connect(...) as websocket:
- ...
-
- The connection is closed automatically when exiting the context.
-
- Args:
- uri: URI of the WebSocket server.
- sock: Preexisting TCP socket. ``sock`` overrides the host and port
- from ``uri``. You may call :func:`socket.create_connection` to
- create a suitable TCP socket.
- ssl_context: Configuration for enabling TLS on the connection.
- server_hostname: Host name for the TLS handshake. ``server_hostname``
- overrides the host name from ``uri``.
- origin: Value of the ``Origin`` header, for servers that require it.
- extensions: List of supported extensions, in order in which they
- should be negotiated and run.
- subprotocols: List of supported subprotocols, in order of decreasing
- preference.
- additional_headers (HeadersLike | None): Arbitrary HTTP headers to add
- to the handshake request.
- user_agent_header: Value of the ``User-Agent`` request header.
- It defaults to ``"Python/x.y.z websockets/X.Y"``.
- Setting it to :obj:`None` removes the header.
- compression: The "permessage-deflate" extension is enabled by default.
- Set ``compression`` to :obj:`None` to disable it. See the
- :doc:`compression guide <../../topics/compression>` for details.
- open_timeout: Timeout for opening the connection in seconds.
- :obj:`None` disables the timeout.
- close_timeout: Timeout for closing the connection in seconds.
- :obj:`None` disables the timeout.
- max_size: Maximum size of incoming messages in bytes.
- :obj:`None` disables the limit.
- logger: Logger for this client.
- It defaults to ``logging.getLogger("websockets.client")``.
- See the :doc:`logging guide <../../topics/logging>` for details.
- create_connection: Factory for the :class:`ClientConnection` managing
- the connection. Set it to a wrapper or a subclass to customize
- connection handling.
-
- Raises:
- InvalidURI: If ``uri`` isn't a valid WebSocket URI.
- OSError: If the TCP connection fails.
- InvalidHandshake: If the opening handshake fails.
- TimeoutError: If the opening handshake times out.
-
- """
-
- # Process parameters
-
- wsuri = parse_uri(uri)
- if not wsuri.secure and ssl_context is not None:
- raise TypeError("ssl_context argument is incompatible with a ws:// URI")
-
- if unix:
- if path is None and sock is None:
- raise TypeError("missing path argument")
- elif path is not None and sock is not None:
- raise TypeError("path and sock arguments are incompatible")
- else:
- assert path is None # private argument, only set by unix_connect()
-
- if subprotocols is not None:
- validate_subprotocols(subprotocols)
-
- if compression == "deflate":
- extensions = enable_client_permessage_deflate(extensions)
- elif compression is not None:
- raise ValueError(f"unsupported compression: {compression}")
-
- # Calculate timeouts on the TCP, TLS, and WebSocket handshakes.
- # The TCP and TLS timeouts must be set on the socket, then removed
- # to avoid conflicting with the WebSocket timeout in handshake().
- deadline = Deadline(open_timeout)
-
- if create_connection is None:
- create_connection = ClientConnection
-
- try:
- # Connect socket
-
- if sock is None:
- if unix:
- sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
- sock.settimeout(deadline.timeout())
- assert path is not None # validated above -- this is for mpypy
- sock.connect(path)
- else:
- sock = socket.create_connection(
- (wsuri.host, wsuri.port),
- deadline.timeout(),
- )
- sock.settimeout(None)
-
- # Disable Nagle algorithm
-
- if not unix:
- sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, True)
-
- # Initialize TLS wrapper and perform TLS handshake
-
- if wsuri.secure:
- if ssl_context is None:
- ssl_context = ssl.create_default_context()
- if server_hostname is None:
- server_hostname = wsuri.host
- sock.settimeout(deadline.timeout())
- sock = ssl_context.wrap_socket(sock, server_hostname=server_hostname)
- sock.settimeout(None)
-
- # Initialize WebSocket connection
-
- protocol = ClientProtocol(
- wsuri,
- origin=origin,
- extensions=extensions,
- subprotocols=subprotocols,
- state=CONNECTING,
- max_size=max_size,
- logger=logger,
- )
-
- # Initialize WebSocket protocol
-
- connection = create_connection(
- sock,
- protocol,
- close_timeout=close_timeout,
- )
- # On failure, handshake() closes the socket and raises an exception.
- connection.handshake(
- additional_headers,
- user_agent_header,
- deadline.timeout(),
- )
-
- except Exception:
- if sock is not None:
- sock.close()
- raise
-
- return connection
-
-
-def unix_connect(
- path: Optional[str] = None,
- uri: Optional[str] = None,
- **kwargs: Any,
-) -> ClientConnection:
- """
- Connect to a WebSocket server listening on a Unix socket.
-
- This function is identical to :func:`connect`, except for the additional
- ``path`` argument. It's only available on Unix.
-
- It's mainly useful for debugging servers listening on Unix sockets.
-
- Args:
- path: File system path to the Unix socket.
- uri: URI of the WebSocket server. ``uri`` defaults to
- ``ws://localhost/`` or, when a ``ssl_context`` is provided, to
- ``wss://localhost/``.
-
- """
- if uri is None:
- if kwargs.get("ssl_context") is None:
- uri = "ws://localhost/"
- else:
- uri = "wss://localhost/"
- return connect(uri=uri, unix=True, path=path, **kwargs)
diff --git a/venv/lib/python3.11/site-packages/websockets/sync/connection.py b/venv/lib/python3.11/site-packages/websockets/sync/connection.py
deleted file mode 100644
index 4a8879e..0000000
--- a/venv/lib/python3.11/site-packages/websockets/sync/connection.py
+++ /dev/null
@@ -1,773 +0,0 @@
-from __future__ import annotations
-
-import contextlib
-import logging
-import random
-import socket
-import struct
-import threading
-import uuid
-from types import TracebackType
-from typing import Any, Dict, Iterable, Iterator, Mapping, Optional, Type, Union
-
-from ..exceptions import ConnectionClosed, ConnectionClosedOK, ProtocolError
-from ..frames import DATA_OPCODES, BytesLike, CloseCode, Frame, Opcode, prepare_ctrl
-from ..http11 import Request, Response
-from ..protocol import CLOSED, OPEN, Event, Protocol, State
-from ..typing import Data, LoggerLike, Subprotocol
-from .messages import Assembler
-from .utils import Deadline
-
-
-__all__ = ["Connection"]
-
-logger = logging.getLogger(__name__)
-
-
-class Connection:
- """
- Threaded implementation of a WebSocket connection.
-
- :class:`Connection` provides APIs shared between WebSocket servers and
- clients.
-
- You shouldn't use it directly. Instead, use
- :class:`~websockets.sync.client.ClientConnection` or
- :class:`~websockets.sync.server.ServerConnection`.
-
- """
-
- recv_bufsize = 65536
-
- def __init__(
- self,
- socket: socket.socket,
- protocol: Protocol,
- *,
- close_timeout: Optional[float] = 10,
- ) -> None:
- self.socket = socket
- self.protocol = protocol
- self.close_timeout = close_timeout
-
- # Inject reference to this instance in the protocol's logger.
- self.protocol.logger = logging.LoggerAdapter(
- self.protocol.logger,
- {"websocket": self},
- )
-
- # Copy attributes from the protocol for convenience.
- self.id: uuid.UUID = self.protocol.id
- """Unique identifier of the connection. Useful in logs."""
- self.logger: LoggerLike = self.protocol.logger
- """Logger for this connection."""
- self.debug = self.protocol.debug
-
- # HTTP handshake request and response.
- self.request: Optional[Request] = None
- """Opening handshake request."""
- self.response: Optional[Response] = None
- """Opening handshake response."""
-
- # Mutex serializing interactions with the protocol.
- self.protocol_mutex = threading.Lock()
-
- # Assembler turning frames into messages and serializing reads.
- self.recv_messages = Assembler()
-
- # Whether we are busy sending a fragmented message.
- self.send_in_progress = False
-
- # Deadline for the closing handshake.
- self.close_deadline: Optional[Deadline] = None
-
- # Mapping of ping IDs to pong waiters, in chronological order.
- self.pings: Dict[bytes, threading.Event] = {}
-
- # Receiving events from the socket.
- self.recv_events_thread = threading.Thread(target=self.recv_events)
- self.recv_events_thread.start()
-
- # Exception raised in recv_events, to be chained to ConnectionClosed
- # in the user thread in order to show why the TCP connection dropped.
- self.recv_events_exc: Optional[BaseException] = None
-
- # Public attributes
-
- @property
- def local_address(self) -> Any:
- """
- Local address of the connection.
-
- For IPv4 connections, this is a ``(host, port)`` tuple.
-
- The format of the address depends on the address family.
- See :meth:`~socket.socket.getsockname`.
-
- """
- return self.socket.getsockname()
-
- @property
- def remote_address(self) -> Any:
- """
- Remote address of the connection.
-
- For IPv4 connections, this is a ``(host, port)`` tuple.
-
- The format of the address depends on the address family.
- See :meth:`~socket.socket.getpeername`.
-
- """
- return self.socket.getpeername()
-
- @property
- def subprotocol(self) -> Optional[Subprotocol]:
- """
- Subprotocol negotiated during the opening handshake.
-
- :obj:`None` if no subprotocol was negotiated.
-
- """
- return self.protocol.subprotocol
-
- # Public methods
-
- def __enter__(self) -> Connection:
- return self
-
- def __exit__(
- self,
- exc_type: Optional[Type[BaseException]],
- exc_value: Optional[BaseException],
- traceback: Optional[TracebackType],
- ) -> None:
- if exc_type is None:
- self.close()
- else:
- self.close(CloseCode.INTERNAL_ERROR)
-
- def __iter__(self) -> Iterator[Data]:
- """
- Iterate on incoming messages.
-
- The iterator calls :meth:`recv` and yields messages in an infinite loop.
-
- It exits when the connection is closed normally. It raises a
- :exc:`~websockets.exceptions.ConnectionClosedError` exception after a
- protocol error or a network failure.
-
- """
- try:
- while True:
- yield self.recv()
- except ConnectionClosedOK:
- return
-
- def recv(self, timeout: Optional[float] = None) -> Data:
- """
- Receive the next message.
-
- When the connection is closed, :meth:`recv` raises
- :exc:`~websockets.exceptions.ConnectionClosed`. Specifically, it raises
- :exc:`~websockets.exceptions.ConnectionClosedOK` after a normal closure
- and :exc:`~websockets.exceptions.ConnectionClosedError` after a protocol
- error or a network failure. This is how you detect the end of the
- message stream.
-
- If ``timeout`` is :obj:`None`, block until a message is received. If
- ``timeout`` is set and no message is received within ``timeout``
- seconds, raise :exc:`TimeoutError`. Set ``timeout`` to ``0`` to check if
- a message was already received.
-
- If the message is fragmented, wait until all fragments are received,
- reassemble them, and return the whole message.
-
- Returns:
- A string (:class:`str`) for a Text_ frame or a bytestring
- (:class:`bytes`) for a Binary_ frame.
-
- .. _Text: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
- .. _Binary: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
-
- Raises:
- ConnectionClosed: When the connection is closed.
- RuntimeError: If two threads call :meth:`recv` or
- :meth:`recv_streaming` concurrently.
-
- """
- try:
- return self.recv_messages.get(timeout)
- except EOFError:
- raise self.protocol.close_exc from self.recv_events_exc
- except RuntimeError:
- raise RuntimeError(
- "cannot call recv while another thread "
- "is already running recv or recv_streaming"
- ) from None
-
- def recv_streaming(self) -> Iterator[Data]:
- """
- Receive the next message frame by frame.
-
- If the message is fragmented, yield each fragment as it is received.
- The iterator must be fully consumed, or else the connection will become
- unusable.
-
- :meth:`recv_streaming` raises the same exceptions as :meth:`recv`.
-
- Returns:
- An iterator of strings (:class:`str`) for a Text_ frame or
- bytestrings (:class:`bytes`) for a Binary_ frame.
-
- .. _Text: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
- .. _Binary: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
-
- Raises:
- ConnectionClosed: When the connection is closed.
- RuntimeError: If two threads call :meth:`recv` or
- :meth:`recv_streaming` concurrently.
-
- """
- try:
- yield from self.recv_messages.get_iter()
- except EOFError:
- raise self.protocol.close_exc from self.recv_events_exc
- except RuntimeError:
- raise RuntimeError(
- "cannot call recv_streaming while another thread "
- "is already running recv or recv_streaming"
- ) from None
-
- def send(self, message: Union[Data, Iterable[Data]]) -> None:
- """
- Send a message.
-
- A string (:class:`str`) is sent as a Text_ frame. A bytestring or
- bytes-like object (:class:`bytes`, :class:`bytearray`, or
- :class:`memoryview`) is sent as a Binary_ frame.
-
- .. _Text: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
- .. _Binary: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
-
- :meth:`send` also accepts an iterable of strings, bytestrings, or
- bytes-like objects to enable fragmentation_. Each item is treated as a
- message fragment and sent in its own frame. All items must be of the
- same type, or else :meth:`send` will raise a :exc:`TypeError` and the
- connection will be closed.
-
- .. _fragmentation: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.4
-
- :meth:`send` rejects dict-like objects because this is often an error.
- (If you really want to send the keys of a dict-like object as fragments,
- call its :meth:`~dict.keys` method and pass the result to :meth:`send`.)
-
- When the connection is closed, :meth:`send` raises
- :exc:`~websockets.exceptions.ConnectionClosed`. Specifically, it
- raises :exc:`~websockets.exceptions.ConnectionClosedOK` after a normal
- connection closure and
- :exc:`~websockets.exceptions.ConnectionClosedError` after a protocol
- error or a network failure.
-
- Args:
- message: Message to send.
-
- Raises:
- ConnectionClosed: When the connection is closed.
- RuntimeError: If a connection is busy sending a fragmented message.
- TypeError: If ``message`` doesn't have a supported type.
-
- """
- # Unfragmented message -- this case must be handled first because
- # strings and bytes-like objects are iterable.
-
- if isinstance(message, str):
- with self.send_context():
- if self.send_in_progress:
- raise RuntimeError(
- "cannot call send while another thread "
- "is already running send"
- )
- self.protocol.send_text(message.encode("utf-8"))
-
- elif isinstance(message, BytesLike):
- with self.send_context():
- if self.send_in_progress:
- raise RuntimeError(
- "cannot call send while another thread "
- "is already running send"
- )
- self.protocol.send_binary(message)
-
- # Catch a common mistake -- passing a dict to send().
-
- elif isinstance(message, Mapping):
- raise TypeError("data is a dict-like object")
-
- # Fragmented message -- regular iterator.
-
- elif isinstance(message, Iterable):
- chunks = iter(message)
- try:
- chunk = next(chunks)
- except StopIteration:
- return
-
- try:
- # First fragment.
- if isinstance(chunk, str):
- text = True
- with self.send_context():
- if self.send_in_progress:
- raise RuntimeError(
- "cannot call send while another thread "
- "is already running send"
- )
- self.send_in_progress = True
- self.protocol.send_text(
- chunk.encode("utf-8"),
- fin=False,
- )
- elif isinstance(chunk, BytesLike):
- text = False
- with self.send_context():
- if self.send_in_progress:
- raise RuntimeError(
- "cannot call send while another thread "
- "is already running send"
- )
- self.send_in_progress = True
- self.protocol.send_binary(
- chunk,
- fin=False,
- )
- else:
- raise TypeError("data iterable must contain bytes or str")
-
- # Other fragments
- for chunk in chunks:
- if isinstance(chunk, str) and text:
- with self.send_context():
- assert self.send_in_progress
- self.protocol.send_continuation(
- chunk.encode("utf-8"),
- fin=False,
- )
- elif isinstance(chunk, BytesLike) and not text:
- with self.send_context():
- assert self.send_in_progress
- self.protocol.send_continuation(
- chunk,
- fin=False,
- )
- else:
- raise TypeError("data iterable must contain uniform types")
-
- # Final fragment.
- with self.send_context():
- self.protocol.send_continuation(b"", fin=True)
- self.send_in_progress = False
-
- except RuntimeError:
- # We didn't start sending a fragmented message.
- raise
-
- except Exception:
- # We're half-way through a fragmented message and we can't
- # complete it. This makes the connection unusable.
- with self.send_context():
- self.protocol.fail(
- CloseCode.INTERNAL_ERROR,
- "error in fragmented message",
- )
- raise
-
- else:
- raise TypeError("data must be bytes, str, or iterable")
-
- def close(self, code: int = CloseCode.NORMAL_CLOSURE, reason: str = "") -> None:
- """
- Perform the closing handshake.
-
- :meth:`close` waits for the other end to complete the handshake, for the
- TCP connection to terminate, and for all incoming messages to be read
- with :meth:`recv`.
-
- :meth:`close` is idempotent: it doesn't do anything once the
- connection is closed.
-
- Args:
- code: WebSocket close code.
- reason: WebSocket close reason.
-
- """
- try:
- # The context manager takes care of waiting for the TCP connection
- # to terminate after calling a method that sends a close frame.
- with self.send_context():
- if self.send_in_progress:
- self.protocol.fail(
- CloseCode.INTERNAL_ERROR,
- "close during fragmented message",
- )
- else:
- self.protocol.send_close(code, reason)
- except ConnectionClosed:
- # Ignore ConnectionClosed exceptions raised from send_context().
- # They mean that the connection is closed, which was the goal.
- pass
-
- def ping(self, data: Optional[Data] = None) -> threading.Event:
- """
- Send a Ping_.
-
- .. _Ping: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.5.2
-
- A ping may serve as a keepalive or as a check that the remote endpoint
- received all messages up to this point
-
- Args:
- data: Payload of the ping. A :class:`str` will be encoded to UTF-8.
- If ``data`` is :obj:`None`, the payload is four random bytes.
-
- Returns:
- An event that will be set when the corresponding pong is received.
- You can ignore it if you don't intend to wait.
-
- ::
-
- pong_event = ws.ping()
- pong_event.wait() # only if you want to wait for the pong
-
- Raises:
- ConnectionClosed: When the connection is closed.
- RuntimeError: If another ping was sent with the same data and
- the corresponding pong wasn't received yet.
-
- """
- if data is not None:
- data = prepare_ctrl(data)
-
- with self.send_context():
- # Protect against duplicates if a payload is explicitly set.
- if data in self.pings:
- raise RuntimeError("already waiting for a pong with the same data")
-
- # Generate a unique random payload otherwise.
- while data is None or data in self.pings:
- data = struct.pack("!I", random.getrandbits(32))
-
- pong_waiter = threading.Event()
- self.pings[data] = pong_waiter
- self.protocol.send_ping(data)
- return pong_waiter
-
- def pong(self, data: Data = b"") -> None:
- """
- Send a Pong_.
-
- .. _Pong: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.5.3
-
- An unsolicited pong may serve as a unidirectional heartbeat.
-
- Args:
- data: Payload of the pong. A :class:`str` will be encoded to UTF-8.
-
- Raises:
- ConnectionClosed: When the connection is closed.
-
- """
- data = prepare_ctrl(data)
-
- with self.send_context():
- self.protocol.send_pong(data)
-
- # Private methods
-
- def process_event(self, event: Event) -> None:
- """
- Process one incoming event.
-
- This method is overridden in subclasses to handle the handshake.
-
- """
- assert isinstance(event, Frame)
- if event.opcode in DATA_OPCODES:
- self.recv_messages.put(event)
-
- if event.opcode is Opcode.PONG:
- self.acknowledge_pings(bytes(event.data))
-
- def acknowledge_pings(self, data: bytes) -> None:
- """
- Acknowledge pings when receiving a pong.
-
- """
- with self.protocol_mutex:
- # Ignore unsolicited pong.
- if data not in self.pings:
- return
- # Sending a pong for only the most recent ping is legal.
- # Acknowledge all previous pings too in that case.
- ping_id = None
- ping_ids = []
- for ping_id, ping in self.pings.items():
- ping_ids.append(ping_id)
- ping.set()
- if ping_id == data:
- break
- else:
- raise AssertionError("solicited pong not found in pings")
- # Remove acknowledged pings from self.pings.
- for ping_id in ping_ids:
- del self.pings[ping_id]
-
- def recv_events(self) -> None:
- """
- Read incoming data from the socket and process events.
-
- Run this method in a thread as long as the connection is alive.
-
- ``recv_events()`` exits immediately when the ``self.socket`` is closed.
-
- """
- try:
- while True:
- try:
- if self.close_deadline is not None:
- self.socket.settimeout(self.close_deadline.timeout())
- data = self.socket.recv(self.recv_bufsize)
- except Exception as exc:
- if self.debug:
- self.logger.debug("error while receiving data", exc_info=True)
- # When the closing handshake is initiated by our side,
- # recv() may block until send_context() closes the socket.
- # In that case, send_context() already set recv_events_exc.
- # Calling set_recv_events_exc() avoids overwriting it.
- with self.protocol_mutex:
- self.set_recv_events_exc(exc)
- break
-
- if data == b"":
- break
-
- # Acquire the connection lock.
- with self.protocol_mutex:
- # Feed incoming data to the connection.
- self.protocol.receive_data(data)
-
- # This isn't expected to raise an exception.
- events = self.protocol.events_received()
-
- # Write outgoing data to the socket.
- try:
- self.send_data()
- except Exception as exc:
- if self.debug:
- self.logger.debug("error while sending data", exc_info=True)
- # Similarly to the above, avoid overriding an exception
- # set by send_context(), in case of a race condition
- # i.e. send_context() closes the socket after recv()
- # returns above but before send_data() calls send().
- self.set_recv_events_exc(exc)
- break
-
- if self.protocol.close_expected():
- # If the connection is expected to close soon, set the
- # close deadline based on the close timeout.
- if self.close_deadline is None:
- self.close_deadline = Deadline(self.close_timeout)
-
- # Unlock conn_mutex before processing events. Else, the
- # application can't send messages in response to events.
-
- # If self.send_data raised an exception, then events are lost.
- # Given that automatic responses write small amounts of data,
- # this should be uncommon, so we don't handle the edge case.
-
- try:
- for event in events:
- # This may raise EOFError if the closing handshake
- # times out while a message is waiting to be read.
- self.process_event(event)
- except EOFError:
- break
-
- # Breaking out of the while True: ... loop means that we believe
- # that the socket doesn't work anymore.
- with self.protocol_mutex:
- # Feed the end of the data stream to the connection.
- self.protocol.receive_eof()
-
- # This isn't expected to generate events.
- assert not self.protocol.events_received()
-
- # There is no error handling because send_data() can only write
- # the end of the data stream here and it handles errors itself.
- self.send_data()
-
- except Exception as exc:
- # This branch should never run. It's a safety net in case of bugs.
- self.logger.error("unexpected internal error", exc_info=True)
- with self.protocol_mutex:
- self.set_recv_events_exc(exc)
- # We don't know where we crashed. Force protocol state to CLOSED.
- self.protocol.state = CLOSED
- finally:
- # This isn't expected to raise an exception.
- self.close_socket()
-
- @contextlib.contextmanager
- def send_context(
- self,
- *,
- expected_state: State = OPEN, # CONNECTING during the opening handshake
- ) -> Iterator[None]:
- """
- Create a context for writing to the connection from user code.
-
- On entry, :meth:`send_context` acquires the connection lock and checks
- that the connection is open; on exit, it writes outgoing data to the
- socket::
-
- with self.send_context():
- self.protocol.send_text(message.encode("utf-8"))
-
- When the connection isn't open on entry, when the connection is expected
- to close on exit, or when an unexpected error happens, terminating the
- connection, :meth:`send_context` waits until the connection is closed
- then raises :exc:`~websockets.exceptions.ConnectionClosed`.
-
- """
- # Should we wait until the connection is closed?
- wait_for_close = False
- # Should we close the socket and raise ConnectionClosed?
- raise_close_exc = False
- # What exception should we chain ConnectionClosed to?
- original_exc: Optional[BaseException] = None
-
- # Acquire the protocol lock.
- with self.protocol_mutex:
- if self.protocol.state is expected_state:
- # Let the caller interact with the protocol.
- try:
- yield
- except (ProtocolError, RuntimeError):
- # The protocol state wasn't changed. Exit immediately.
- raise
- except Exception as exc:
- self.logger.error("unexpected internal error", exc_info=True)
- # This branch should never run. It's a safety net in case of
- # bugs. Since we don't know what happened, we will close the
- # connection and raise the exception to the caller.
- wait_for_close = False
- raise_close_exc = True
- original_exc = exc
- else:
- # Check if the connection is expected to close soon.
- if self.protocol.close_expected():
- wait_for_close = True
- # If the connection is expected to close soon, set the
- # close deadline based on the close timeout.
-
- # Since we tested earlier that protocol.state was OPEN
- # (or CONNECTING) and we didn't release protocol_mutex,
- # it is certain that self.close_deadline is still None.
- assert self.close_deadline is None
- self.close_deadline = Deadline(self.close_timeout)
- # Write outgoing data to the socket.
- try:
- self.send_data()
- except Exception as exc:
- if self.debug:
- self.logger.debug("error while sending data", exc_info=True)
- # While the only expected exception here is OSError,
- # other exceptions would be treated identically.
- wait_for_close = False
- raise_close_exc = True
- original_exc = exc
-
- else: # self.protocol.state is not expected_state
- # Minor layering violation: we assume that the connection
- # will be closing soon if it isn't in the expected state.
- wait_for_close = True
- raise_close_exc = True
-
- # To avoid a deadlock, release the connection lock by exiting the
- # context manager before waiting for recv_events() to terminate.
-
- # If the connection is expected to close soon and the close timeout
- # elapses, close the socket to terminate the connection.
- if wait_for_close:
- if self.close_deadline is None:
- timeout = self.close_timeout
- else:
- # Thread.join() returns immediately if timeout is negative.
- timeout = self.close_deadline.timeout(raise_if_elapsed=False)
- self.recv_events_thread.join(timeout)
-
- if self.recv_events_thread.is_alive():
- # There's no risk to overwrite another error because
- # original_exc is never set when wait_for_close is True.
- assert original_exc is None
- original_exc = TimeoutError("timed out while closing connection")
- # Set recv_events_exc before closing the socket in order to get
- # proper exception reporting.
- raise_close_exc = True
- with self.protocol_mutex:
- self.set_recv_events_exc(original_exc)
-
- # If an error occurred, close the socket to terminate the connection and
- # raise an exception.
- if raise_close_exc:
- self.close_socket()
- self.recv_events_thread.join()
- raise self.protocol.close_exc from original_exc
-
- def send_data(self) -> None:
- """
- Send outgoing data.
-
- This method requires holding protocol_mutex.
-
- Raises:
- OSError: When a socket operations fails.
-
- """
- assert self.protocol_mutex.locked()
- for data in self.protocol.data_to_send():
- if data:
- if self.close_deadline is not None:
- self.socket.settimeout(self.close_deadline.timeout())
- self.socket.sendall(data)
- else:
- try:
- self.socket.shutdown(socket.SHUT_WR)
- except OSError: # socket already closed
- pass
-
- def set_recv_events_exc(self, exc: Optional[BaseException]) -> None:
- """
- Set recv_events_exc, if not set yet.
-
- This method requires holding protocol_mutex.
-
- """
- assert self.protocol_mutex.locked()
- if self.recv_events_exc is None:
- self.recv_events_exc = exc
-
- def close_socket(self) -> None:
- """
- Shutdown and close socket. Close message assembler.
-
- Calling close_socket() guarantees that recv_events() terminates. Indeed,
- recv_events() may block only on socket.recv() or on recv_messages.put().
-
- """
- # shutdown() is required to interrupt recv() on Linux.
- try:
- self.socket.shutdown(socket.SHUT_RDWR)
- except OSError:
- pass # socket is already closed
- self.socket.close()
- self.recv_messages.close()
diff --git a/venv/lib/python3.11/site-packages/websockets/sync/messages.py b/venv/lib/python3.11/site-packages/websockets/sync/messages.py
deleted file mode 100644
index 67a2231..0000000
--- a/venv/lib/python3.11/site-packages/websockets/sync/messages.py
+++ /dev/null
@@ -1,281 +0,0 @@
-from __future__ import annotations
-
-import codecs
-import queue
-import threading
-from typing import Iterator, List, Optional, cast
-
-from ..frames import Frame, Opcode
-from ..typing import Data
-
-
-__all__ = ["Assembler"]
-
-UTF8Decoder = codecs.getincrementaldecoder("utf-8")
-
-
-class Assembler:
- """
- Assemble messages from frames.
-
- """
-
- def __init__(self) -> None:
- # Serialize reads and writes -- except for reads via synchronization
- # primitives provided by the threading and queue modules.
- self.mutex = threading.Lock()
-
- # We create a latch with two events to ensure proper interleaving of
- # writing and reading messages.
- # put() sets this event to tell get() that a message can be fetched.
- self.message_complete = threading.Event()
- # get() sets this event to let put() know that the message was fetched.
- self.message_fetched = threading.Event()
-
- # This flag prevents concurrent calls to get() by user code.
- self.get_in_progress = False
- # This flag prevents concurrent calls to put() by library code.
- self.put_in_progress = False
-
- # Decoder for text frames, None for binary frames.
- self.decoder: Optional[codecs.IncrementalDecoder] = None
-
- # Buffer of frames belonging to the same message.
- self.chunks: List[Data] = []
-
- # When switching from "buffering" to "streaming", we use a thread-safe
- # queue for transferring frames from the writing thread (library code)
- # to the reading thread (user code). We're buffering when chunks_queue
- # is None and streaming when it's a SimpleQueue. None is a sentinel
- # value marking the end of the stream, superseding message_complete.
-
- # Stream data from frames belonging to the same message.
- # Remove quotes around type when dropping Python < 3.9.
- self.chunks_queue: Optional["queue.SimpleQueue[Optional[Data]]"] = None
-
- # This flag marks the end of the stream.
- self.closed = False
-
- def get(self, timeout: Optional[float] = None) -> Data:
- """
- Read the next message.
-
- :meth:`get` returns a single :class:`str` or :class:`bytes`.
-
- If the message is fragmented, :meth:`get` waits until the last frame is
- received, then it reassembles the message and returns it. To receive
- messages frame by frame, use :meth:`get_iter` instead.
-
- Args:
- timeout: If a timeout is provided and elapses before a complete
- message is received, :meth:`get` raises :exc:`TimeoutError`.
-
- Raises:
- EOFError: If the stream of frames has ended.
- RuntimeError: If two threads run :meth:`get` or :meth:`get_iter`
- concurrently.
-
- """
- with self.mutex:
- if self.closed:
- raise EOFError("stream of frames ended")
-
- if self.get_in_progress:
- raise RuntimeError("get or get_iter is already running")
-
- self.get_in_progress = True
-
- # If the message_complete event isn't set yet, release the lock to
- # allow put() to run and eventually set it.
- # Locking with get_in_progress ensures only one thread can get here.
- completed = self.message_complete.wait(timeout)
-
- with self.mutex:
- self.get_in_progress = False
-
- # Waiting for a complete message timed out.
- if not completed:
- raise TimeoutError(f"timed out in {timeout:.1f}s")
-
- # get() was unblocked by close() rather than put().
- if self.closed:
- raise EOFError("stream of frames ended")
-
- assert self.message_complete.is_set()
- self.message_complete.clear()
-
- joiner: Data = b"" if self.decoder is None else ""
- # mypy cannot figure out that chunks have the proper type.
- message: Data = joiner.join(self.chunks) # type: ignore
-
- assert not self.message_fetched.is_set()
- self.message_fetched.set()
-
- self.chunks = []
- assert self.chunks_queue is None
-
- return message
-
- def get_iter(self) -> Iterator[Data]:
- """
- Stream the next message.
-
- Iterating the return value of :meth:`get_iter` yields a :class:`str` or
- :class:`bytes` for each frame in the message.
-
- The iterator must be fully consumed before calling :meth:`get_iter` or
- :meth:`get` again. Else, :exc:`RuntimeError` is raised.
-
- This method only makes sense for fragmented messages. If messages aren't
- fragmented, use :meth:`get` instead.
-
- Raises:
- EOFError: If the stream of frames has ended.
- RuntimeError: If two threads run :meth:`get` or :meth:`get_iter`
- concurrently.
-
- """
- with self.mutex:
- if self.closed:
- raise EOFError("stream of frames ended")
-
- if self.get_in_progress:
- raise RuntimeError("get or get_iter is already running")
-
- chunks = self.chunks
- self.chunks = []
- self.chunks_queue = cast(
- # Remove quotes around type when dropping Python < 3.9.
- "queue.SimpleQueue[Optional[Data]]",
- queue.SimpleQueue(),
- )
-
- # Sending None in chunk_queue supersedes setting message_complete
- # when switching to "streaming". If message is already complete
- # when the switch happens, put() didn't send None, so we have to.
- if self.message_complete.is_set():
- self.chunks_queue.put(None)
-
- self.get_in_progress = True
-
- # Locking with get_in_progress ensures only one thread can get here.
- yield from chunks
- while True:
- chunk = self.chunks_queue.get()
- if chunk is None:
- break
- yield chunk
-
- with self.mutex:
- self.get_in_progress = False
-
- assert self.message_complete.is_set()
- self.message_complete.clear()
-
- # get_iter() was unblocked by close() rather than put().
- if self.closed:
- raise EOFError("stream of frames ended")
-
- assert not self.message_fetched.is_set()
- self.message_fetched.set()
-
- assert self.chunks == []
- self.chunks_queue = None
-
- def put(self, frame: Frame) -> None:
- """
- Add ``frame`` to the next message.
-
- When ``frame`` is the final frame in a message, :meth:`put` waits until
- the message is fetched, either by calling :meth:`get` or by fully
- consuming the return value of :meth:`get_iter`.
-
- :meth:`put` assumes that the stream of frames respects the protocol. If
- it doesn't, the behavior is undefined.
-
- Raises:
- EOFError: If the stream of frames has ended.
- RuntimeError: If two threads run :meth:`put` concurrently.
-
- """
- with self.mutex:
- if self.closed:
- raise EOFError("stream of frames ended")
-
- if self.put_in_progress:
- raise RuntimeError("put is already running")
-
- if frame.opcode is Opcode.TEXT:
- self.decoder = UTF8Decoder(errors="strict")
- elif frame.opcode is Opcode.BINARY:
- self.decoder = None
- elif frame.opcode is Opcode.CONT:
- pass
- else:
- # Ignore control frames.
- return
-
- data: Data
- if self.decoder is not None:
- data = self.decoder.decode(frame.data, frame.fin)
- else:
- data = frame.data
-
- if self.chunks_queue is None:
- self.chunks.append(data)
- else:
- self.chunks_queue.put(data)
-
- if not frame.fin:
- return
-
- # Message is complete. Wait until it's fetched to return.
-
- assert not self.message_complete.is_set()
- self.message_complete.set()
-
- if self.chunks_queue is not None:
- self.chunks_queue.put(None)
-
- assert not self.message_fetched.is_set()
-
- self.put_in_progress = True
-
- # Release the lock to allow get() to run and eventually set the event.
- self.message_fetched.wait()
-
- with self.mutex:
- self.put_in_progress = False
-
- assert self.message_fetched.is_set()
- self.message_fetched.clear()
-
- # put() was unblocked by close() rather than get() or get_iter().
- if self.closed:
- raise EOFError("stream of frames ended")
-
- self.decoder = None
-
- def close(self) -> None:
- """
- End the stream of frames.
-
- Calling :meth:`close` concurrently with :meth:`get`, :meth:`get_iter`,
- or :meth:`put` is safe. They will raise :exc:`EOFError`.
-
- """
- with self.mutex:
- if self.closed:
- return
-
- self.closed = True
-
- # Unblock get or get_iter.
- if self.get_in_progress:
- self.message_complete.set()
- if self.chunks_queue is not None:
- self.chunks_queue.put(None)
-
- # Unblock put().
- if self.put_in_progress:
- self.message_fetched.set()
diff --git a/venv/lib/python3.11/site-packages/websockets/sync/server.py b/venv/lib/python3.11/site-packages/websockets/sync/server.py
deleted file mode 100644
index 1476796..0000000
--- a/venv/lib/python3.11/site-packages/websockets/sync/server.py
+++ /dev/null
@@ -1,530 +0,0 @@
-from __future__ import annotations
-
-import http
-import logging
-import os
-import selectors
-import socket
-import ssl
-import sys
-import threading
-from types import TracebackType
-from typing import Any, Callable, Optional, Sequence, Type
-
-from websockets.frames import CloseCode
-
-from ..extensions.base import ServerExtensionFactory
-from ..extensions.permessage_deflate import enable_server_permessage_deflate
-from ..headers import validate_subprotocols
-from ..http import USER_AGENT
-from ..http11 import Request, Response
-from ..protocol import CONNECTING, OPEN, Event
-from ..server import ServerProtocol
-from ..typing import LoggerLike, Origin, Subprotocol
-from .connection import Connection
-from .utils import Deadline
-
-
-__all__ = ["serve", "unix_serve", "ServerConnection", "WebSocketServer"]
-
-
-class ServerConnection(Connection):
- """
- Threaded implementation of a WebSocket server connection.
-
- :class:`ServerConnection` provides :meth:`recv` and :meth:`send` methods for
- receiving and sending messages.
-
- It supports iteration to receive messages::
-
- for message in websocket:
- process(message)
-
- The iterator exits normally when the connection is closed with close code
- 1000 (OK) or 1001 (going away) or without a close code. It raises a
- :exc:`~websockets.exceptions.ConnectionClosedError` when the connection is
- closed with any other code.
-
- Args:
- socket: Socket connected to a WebSocket client.
- protocol: Sans-I/O connection.
- close_timeout: Timeout for closing the connection in seconds.
-
- """
-
- def __init__(
- self,
- socket: socket.socket,
- protocol: ServerProtocol,
- *,
- close_timeout: Optional[float] = 10,
- ) -> None:
- self.protocol: ServerProtocol
- self.request_rcvd = threading.Event()
- super().__init__(
- socket,
- protocol,
- close_timeout=close_timeout,
- )
-
- def handshake(
- self,
- process_request: Optional[
- Callable[
- [ServerConnection, Request],
- Optional[Response],
- ]
- ] = None,
- process_response: Optional[
- Callable[
- [ServerConnection, Request, Response],
- Optional[Response],
- ]
- ] = None,
- server_header: Optional[str] = USER_AGENT,
- timeout: Optional[float] = None,
- ) -> None:
- """
- Perform the opening handshake.
-
- """
- if not self.request_rcvd.wait(timeout):
- self.close_socket()
- self.recv_events_thread.join()
- raise TimeoutError("timed out during handshake")
-
- if self.request is None:
- self.close_socket()
- self.recv_events_thread.join()
- raise ConnectionError("connection closed during handshake")
-
- with self.send_context(expected_state=CONNECTING):
- self.response = None
-
- if process_request is not None:
- try:
- self.response = process_request(self, self.request)
- except Exception as exc:
- self.protocol.handshake_exc = exc
- self.logger.error("opening handshake failed", exc_info=True)
- self.response = self.protocol.reject(
- http.HTTPStatus.INTERNAL_SERVER_ERROR,
- (
- "Failed to open a WebSocket connection.\n"
- "See server log for more information.\n"
- ),
- )
-
- if self.response is None:
- self.response = self.protocol.accept(self.request)
-
- if server_header is not None:
- self.response.headers["Server"] = server_header
-
- if process_response is not None:
- try:
- response = process_response(self, self.request, self.response)
- except Exception as exc:
- self.protocol.handshake_exc = exc
- self.logger.error("opening handshake failed", exc_info=True)
- self.response = self.protocol.reject(
- http.HTTPStatus.INTERNAL_SERVER_ERROR,
- (
- "Failed to open a WebSocket connection.\n"
- "See server log for more information.\n"
- ),
- )
- else:
- if response is not None:
- self.response = response
-
- self.protocol.send_response(self.response)
-
- if self.protocol.state is not OPEN:
- self.recv_events_thread.join(self.close_timeout)
- self.close_socket()
- self.recv_events_thread.join()
-
- if self.protocol.handshake_exc is not None:
- raise self.protocol.handshake_exc
-
- def process_event(self, event: Event) -> None:
- """
- Process one incoming event.
-
- """
- # First event - handshake request.
- if self.request is None:
- assert isinstance(event, Request)
- self.request = event
- self.request_rcvd.set()
- # Later events - frames.
- else:
- super().process_event(event)
-
- def recv_events(self) -> None:
- """
- Read incoming data from the socket and process events.
-
- """
- try:
- super().recv_events()
- finally:
- # If the connection is closed during the handshake, unblock it.
- self.request_rcvd.set()
-
-
-class WebSocketServer:
- """
- WebSocket server returned by :func:`serve`.
-
- This class mirrors the API of :class:`~socketserver.BaseServer`, notably the
- :meth:`~socketserver.BaseServer.serve_forever` and
- :meth:`~socketserver.BaseServer.shutdown` methods, as well as the context
- manager protocol.
-
- Args:
- socket: Server socket listening for new connections.
- handler: Handler for one connection. Receives the socket and address
- returned by :meth:`~socket.socket.accept`.
- logger: Logger for this server.
-
- """
-
- def __init__(
- self,
- socket: socket.socket,
- handler: Callable[[socket.socket, Any], None],
- logger: Optional[LoggerLike] = None,
- ):
- self.socket = socket
- self.handler = handler
- if logger is None:
- logger = logging.getLogger("websockets.server")
- self.logger = logger
- if sys.platform != "win32":
- self.shutdown_watcher, self.shutdown_notifier = os.pipe()
-
- def serve_forever(self) -> None:
- """
- See :meth:`socketserver.BaseServer.serve_forever`.
-
- This method doesn't return. Calling :meth:`shutdown` from another thread
- stops the server.
-
- Typical use::
-
- with serve(...) as server:
- server.serve_forever()
-
- """
- poller = selectors.DefaultSelector()
- poller.register(self.socket, selectors.EVENT_READ)
- if sys.platform != "win32":
- poller.register(self.shutdown_watcher, selectors.EVENT_READ)
-
- while True:
- poller.select()
- try:
- # If the socket is closed, this will raise an exception and exit
- # the loop. So we don't need to check the return value of select().
- sock, addr = self.socket.accept()
- except OSError:
- break
- thread = threading.Thread(target=self.handler, args=(sock, addr))
- thread.start()
-
- def shutdown(self) -> None:
- """
- See :meth:`socketserver.BaseServer.shutdown`.
-
- """
- self.socket.close()
- if sys.platform != "win32":
- os.write(self.shutdown_notifier, b"x")
-
- def fileno(self) -> int:
- """
- See :meth:`socketserver.BaseServer.fileno`.
-
- """
- return self.socket.fileno()
-
- def __enter__(self) -> WebSocketServer:
- return self
-
- def __exit__(
- self,
- exc_type: Optional[Type[BaseException]],
- exc_value: Optional[BaseException],
- traceback: Optional[TracebackType],
- ) -> None:
- self.shutdown()
-
-
-def serve(
- handler: Callable[[ServerConnection], None],
- host: Optional[str] = None,
- port: Optional[int] = None,
- *,
- # TCP/TLS — unix and path are only for unix_serve()
- sock: Optional[socket.socket] = None,
- ssl_context: Optional[ssl.SSLContext] = None,
- unix: bool = False,
- path: Optional[str] = None,
- # WebSocket
- origins: Optional[Sequence[Optional[Origin]]] = None,
- extensions: Optional[Sequence[ServerExtensionFactory]] = None,
- subprotocols: Optional[Sequence[Subprotocol]] = None,
- select_subprotocol: Optional[
- Callable[
- [ServerConnection, Sequence[Subprotocol]],
- Optional[Subprotocol],
- ]
- ] = None,
- process_request: Optional[
- Callable[
- [ServerConnection, Request],
- Optional[Response],
- ]
- ] = None,
- process_response: Optional[
- Callable[
- [ServerConnection, Request, Response],
- Optional[Response],
- ]
- ] = None,
- server_header: Optional[str] = USER_AGENT,
- compression: Optional[str] = "deflate",
- # Timeouts
- open_timeout: Optional[float] = 10,
- close_timeout: Optional[float] = 10,
- # Limits
- max_size: Optional[int] = 2**20,
- # Logging
- logger: Optional[LoggerLike] = None,
- # Escape hatch for advanced customization
- create_connection: Optional[Type[ServerConnection]] = None,
-) -> WebSocketServer:
- """
- Create a WebSocket server listening on ``host`` and ``port``.
-
- Whenever a client connects, the server creates a :class:`ServerConnection`,
- performs the opening handshake, and delegates to the ``handler``.
-
- The handler receives a :class:`ServerConnection` instance, which you can use
- to send and receive messages.
-
- Once the handler completes, either normally or with an exception, the server
- performs the closing handshake and closes the connection.
-
- :class:`WebSocketServer` mirrors the API of
- :class:`~socketserver.BaseServer`. Treat it as a context manager to ensure
- that it will be closed and call the :meth:`~WebSocketServer.serve_forever`
- method to serve requests::
-
- def handler(websocket):
- ...
-
- with websockets.sync.server.serve(handler, ...) as server:
- server.serve_forever()
-
- Args:
- handler: Connection handler. It receives the WebSocket connection,
- which is a :class:`ServerConnection`, in argument.
- host: Network interfaces the server binds to.
- See :func:`~socket.create_server` for details.
- port: TCP port the server listens on.
- See :func:`~socket.create_server` for details.
- sock: Preexisting TCP socket. ``sock`` replaces ``host`` and ``port``.
- You may call :func:`socket.create_server` to create a suitable TCP
- socket.
- ssl_context: Configuration for enabling TLS on the connection.
- origins: Acceptable values of the ``Origin`` header, for defending
- against Cross-Site WebSocket Hijacking attacks. Include :obj:`None`
- in the list if the lack of an origin is acceptable.
- extensions: List of supported extensions, in order in which they
- should be negotiated and run.
- subprotocols: List of supported subprotocols, in order of decreasing
- preference.
- select_subprotocol: Callback for selecting a subprotocol among
- those supported by the client and the server. It receives a
- :class:`ServerConnection` (not a
- :class:`~websockets.server.ServerProtocol`!) instance and a list of
- subprotocols offered by the client. Other than the first argument,
- it has the same behavior as the
- :meth:`ServerProtocol.select_subprotocol
- <websockets.server.ServerProtocol.select_subprotocol>` method.
- process_request: Intercept the request during the opening handshake.
- Return an HTTP response to force the response or :obj:`None` to
- continue normally. When you force an HTTP 101 Switching Protocols response,
- the handshake is successful. Else, the connection is aborted.
- process_response: Intercept the response during the opening handshake.
- Return an HTTP response to force the response or :obj:`None` to
- continue normally. When you force an HTTP 101 Switching Protocols response,
- the handshake is successful. Else, the connection is aborted.
- server_header: Value of the ``Server`` response header.
- It defaults to ``"Python/x.y.z websockets/X.Y"``. Setting it to
- :obj:`None` removes the header.
- compression: The "permessage-deflate" extension is enabled by default.
- Set ``compression`` to :obj:`None` to disable it. See the
- :doc:`compression guide <../../topics/compression>` for details.
- open_timeout: Timeout for opening connections in seconds.
- :obj:`None` disables the timeout.
- close_timeout: Timeout for closing connections in seconds.
- :obj:`None` disables the timeout.
- max_size: Maximum size of incoming messages in bytes.
- :obj:`None` disables the limit.
- logger: Logger for this server.
- It defaults to ``logging.getLogger("websockets.server")``. See the
- :doc:`logging guide <../../topics/logging>` for details.
- create_connection: Factory for the :class:`ServerConnection` managing
- the connection. Set it to a wrapper or a subclass to customize
- connection handling.
- """
-
- # Process parameters
-
- if subprotocols is not None:
- validate_subprotocols(subprotocols)
-
- if compression == "deflate":
- extensions = enable_server_permessage_deflate(extensions)
- elif compression is not None:
- raise ValueError(f"unsupported compression: {compression}")
-
- if create_connection is None:
- create_connection = ServerConnection
-
- # Bind socket and listen
-
- if sock is None:
- if unix:
- if path is None:
- raise TypeError("missing path argument")
- sock = socket.create_server(path, family=socket.AF_UNIX)
- else:
- sock = socket.create_server((host, port))
- else:
- if path is not None:
- raise TypeError("path and sock arguments are incompatible")
-
- # Initialize TLS wrapper
-
- if ssl_context is not None:
- sock = ssl_context.wrap_socket(
- sock,
- server_side=True,
- # Delay TLS handshake until after we set a timeout on the socket.
- do_handshake_on_connect=False,
- )
-
- # Define request handler
-
- def conn_handler(sock: socket.socket, addr: Any) -> None:
- # Calculate timeouts on the TLS and WebSocket handshakes.
- # The TLS timeout must be set on the socket, then removed
- # to avoid conflicting with the WebSocket timeout in handshake().
- deadline = Deadline(open_timeout)
-
- try:
- # Disable Nagle algorithm
-
- if not unix:
- sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, True)
-
- # Perform TLS handshake
-
- if ssl_context is not None:
- sock.settimeout(deadline.timeout())
- assert isinstance(sock, ssl.SSLSocket) # mypy cannot figure this out
- sock.do_handshake()
- sock.settimeout(None)
-
- # Create a closure so that select_subprotocol has access to self.
-
- protocol_select_subprotocol: Optional[
- Callable[
- [ServerProtocol, Sequence[Subprotocol]],
- Optional[Subprotocol],
- ]
- ] = None
-
- if select_subprotocol is not None:
-
- def protocol_select_subprotocol(
- protocol: ServerProtocol,
- subprotocols: Sequence[Subprotocol],
- ) -> Optional[Subprotocol]:
- # mypy doesn't know that select_subprotocol is immutable.
- assert select_subprotocol is not None
- # Ensure this function is only used in the intended context.
- assert protocol is connection.protocol
- return select_subprotocol(connection, subprotocols)
-
- # Initialize WebSocket connection
-
- protocol = ServerProtocol(
- origins=origins,
- extensions=extensions,
- subprotocols=subprotocols,
- select_subprotocol=protocol_select_subprotocol,
- state=CONNECTING,
- max_size=max_size,
- logger=logger,
- )
-
- # Initialize WebSocket protocol
-
- assert create_connection is not None # help mypy
- connection = create_connection(
- sock,
- protocol,
- close_timeout=close_timeout,
- )
- # On failure, handshake() closes the socket, raises an exception, and
- # logs it.
- connection.handshake(
- process_request,
- process_response,
- server_header,
- deadline.timeout(),
- )
-
- except Exception:
- sock.close()
- return
-
- try:
- handler(connection)
- except Exception:
- protocol.logger.error("connection handler failed", exc_info=True)
- connection.close(CloseCode.INTERNAL_ERROR)
- else:
- connection.close()
-
- # Initialize server
-
- return WebSocketServer(sock, conn_handler, logger)
-
-
-def unix_serve(
- handler: Callable[[ServerConnection], Any],
- path: Optional[str] = None,
- **kwargs: Any,
-) -> WebSocketServer:
- """
- Create a WebSocket server listening on a Unix socket.
-
- This function is identical to :func:`serve`, except the ``host`` and
- ``port`` arguments are replaced by ``path``. It's only available on Unix.
-
- It's useful for deploying a server behind a reverse proxy such as nginx.
-
- Args:
- handler: Connection handler. It receives the WebSocket connection,
- which is a :class:`ServerConnection`, in argument.
- path: File system path to the Unix socket.
-
- """
- return serve(handler, path=path, unix=True, **kwargs)
diff --git a/venv/lib/python3.11/site-packages/websockets/sync/utils.py b/venv/lib/python3.11/site-packages/websockets/sync/utils.py
deleted file mode 100644
index 471f32e..0000000
--- a/venv/lib/python3.11/site-packages/websockets/sync/utils.py
+++ /dev/null
@@ -1,46 +0,0 @@
-from __future__ import annotations
-
-import time
-from typing import Optional
-
-
-__all__ = ["Deadline"]
-
-
-class Deadline:
- """
- Manage timeouts across multiple steps.
-
- Args:
- timeout: Time available in seconds or :obj:`None` if there is no limit.
-
- """
-
- def __init__(self, timeout: Optional[float]) -> None:
- self.deadline: Optional[float]
- if timeout is None:
- self.deadline = None
- else:
- self.deadline = time.monotonic() + timeout
-
- def timeout(self, *, raise_if_elapsed: bool = True) -> Optional[float]:
- """
- Calculate a timeout from a deadline.
-
- Args:
- raise_if_elapsed (bool): Whether to raise :exc:`TimeoutError`
- if the deadline lapsed.
-
- Raises:
- TimeoutError: If the deadline lapsed.
-
- Returns:
- Time left in seconds or :obj:`None` if there is no limit.
-
- """
- if self.deadline is None:
- return None
- timeout = self.deadline - time.monotonic()
- if raise_if_elapsed and timeout <= 0:
- raise TimeoutError("timed out")
- return timeout
diff --git a/venv/lib/python3.11/site-packages/websockets/typing.py b/venv/lib/python3.11/site-packages/websockets/typing.py
deleted file mode 100644
index cc3e3ec..0000000
--- a/venv/lib/python3.11/site-packages/websockets/typing.py
+++ /dev/null
@@ -1,67 +0,0 @@
-from __future__ import annotations
-
-import http
-import logging
-from typing import List, NewType, Optional, Tuple, Union
-
-
-__all__ = [
- "Data",
- "LoggerLike",
- "StatusLike",
- "Origin",
- "Subprotocol",
- "ExtensionName",
- "ExtensionParameter",
-]
-
-
-# Public types used in the signature of public APIs
-
-Data = Union[str, bytes]
-"""Types supported in a WebSocket message:
-:class:`str` for a Text_ frame, :class:`bytes` for a Binary_.
-
-.. _Text: https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
-.. _Binary : https://www.rfc-editor.org/rfc/rfc6455.html#section-5.6
-
-"""
-
-
-LoggerLike = Union[logging.Logger, logging.LoggerAdapter]
-"""Types accepted where a :class:`~logging.Logger` is expected."""
-
-
-StatusLike = Union[http.HTTPStatus, int]
-"""
-Types accepted where an :class:`~http.HTTPStatus` is expected."""
-
-
-Origin = NewType("Origin", str)
-"""Value of a ``Origin`` header."""
-
-
-Subprotocol = NewType("Subprotocol", str)
-"""Subprotocol in a ``Sec-WebSocket-Protocol`` header."""
-
-
-ExtensionName = NewType("ExtensionName", str)
-"""Name of a WebSocket extension."""
-
-
-ExtensionParameter = Tuple[str, Optional[str]]
-"""Parameter of a WebSocket extension."""
-
-
-# Private types
-
-ExtensionHeader = Tuple[ExtensionName, List[ExtensionParameter]]
-"""Extension in a ``Sec-WebSocket-Extensions`` header."""
-
-
-ConnectionOption = NewType("ConnectionOption", str)
-"""Connection option in a ``Connection`` header."""
-
-
-UpgradeProtocol = NewType("UpgradeProtocol", str)
-"""Upgrade protocol in an ``Upgrade`` header."""
diff --git a/venv/lib/python3.11/site-packages/websockets/uri.py b/venv/lib/python3.11/site-packages/websockets/uri.py
deleted file mode 100644
index 385090f..0000000
--- a/venv/lib/python3.11/site-packages/websockets/uri.py
+++ /dev/null
@@ -1,108 +0,0 @@
-from __future__ import annotations
-
-import dataclasses
-import urllib.parse
-from typing import Optional, Tuple
-
-from . import exceptions
-
-
-__all__ = ["parse_uri", "WebSocketURI"]
-
-
-@dataclasses.dataclass
-class WebSocketURI:
- """
- WebSocket URI.
-
- Attributes:
- secure: :obj:`True` for a ``wss`` URI, :obj:`False` for a ``ws`` URI.
- host: Normalized to lower case.
- port: Always set even if it's the default.
- path: May be empty.
- query: May be empty if the URI doesn't include a query component.
- username: Available when the URI contains `User Information`_.
- password: Available when the URI contains `User Information`_.
-
- .. _User Information: https://www.rfc-editor.org/rfc/rfc3986.html#section-3.2.1
-
- """
-
- secure: bool
- host: str
- port: int
- path: str
- query: str
- username: Optional[str] = None
- password: Optional[str] = None
-
- @property
- def resource_name(self) -> str:
- if self.path:
- resource_name = self.path
- else:
- resource_name = "/"
- if self.query:
- resource_name += "?" + self.query
- return resource_name
-
- @property
- def user_info(self) -> Optional[Tuple[str, str]]:
- if self.username is None:
- return None
- assert self.password is not None
- return (self.username, self.password)
-
-
-# All characters from the gen-delims and sub-delims sets in RFC 3987.
-DELIMS = ":/?#[]@!$&'()*+,;="
-
-
-def parse_uri(uri: str) -> WebSocketURI:
- """
- Parse and validate a WebSocket URI.
-
- Args:
- uri: WebSocket URI.
-
- Returns:
- WebSocketURI: Parsed WebSocket URI.
-
- Raises:
- InvalidURI: if ``uri`` isn't a valid WebSocket URI.
-
- """
- parsed = urllib.parse.urlparse(uri)
- if parsed.scheme not in ["ws", "wss"]:
- raise exceptions.InvalidURI(uri, "scheme isn't ws or wss")
- if parsed.hostname is None:
- raise exceptions.InvalidURI(uri, "hostname isn't provided")
- if parsed.fragment != "":
- raise exceptions.InvalidURI(uri, "fragment identifier is meaningless")
-
- secure = parsed.scheme == "wss"
- host = parsed.hostname
- port = parsed.port or (443 if secure else 80)
- path = parsed.path
- query = parsed.query
- username = parsed.username
- password = parsed.password
- # urllib.parse.urlparse accepts URLs with a username but without a
- # password. This doesn't make sense for HTTP Basic Auth credentials.
- if username is not None and password is None:
- raise exceptions.InvalidURI(uri, "username provided without password")
-
- try:
- uri.encode("ascii")
- except UnicodeEncodeError:
- # Input contains non-ASCII characters.
- # It must be an IRI. Convert it to a URI.
- host = host.encode("idna").decode()
- path = urllib.parse.quote(path, safe=DELIMS)
- query = urllib.parse.quote(query, safe=DELIMS)
- if username is not None:
- assert password is not None
- username = urllib.parse.quote(username, safe=DELIMS)
- password = urllib.parse.quote(password, safe=DELIMS)
-
- return WebSocketURI(secure, host, port, path, query, username, password)
diff --git a/venv/lib/python3.11/site-packages/websockets/utils.py b/venv/lib/python3.11/site-packages/websockets/utils.py
deleted file mode 100644
index c404049..0000000
--- a/venv/lib/python3.11/site-packages/websockets/utils.py
+++ /dev/null
@@ -1,51 +0,0 @@
-from __future__ import annotations
-
-import base64
-import hashlib
-import secrets
-import sys
-
-
-__all__ = ["accept_key", "apply_mask"]
-
-
-GUID = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
-
-
-def generate_key() -> str:
- """
- Generate a random key for the Sec-WebSocket-Key header.
-
- """
- key = secrets.token_bytes(16)
- return base64.b64encode(key).decode()
-
-
-def accept_key(key: str) -> str:
- """
- Compute the value of the Sec-WebSocket-Accept header.
-
- Args:
- key: value of the Sec-WebSocket-Key header.
-
- """
- sha1 = hashlib.sha1((key + GUID).encode()).digest()
- return base64.b64encode(sha1).decode()
-
-
-def apply_mask(data: bytes, mask: bytes) -> bytes:
- """
- Apply masking to the data of a WebSocket message.
-
- Args:
- data: data to mask.
- mask: 4-bytes mask.
-
- """
- if len(mask) != 4:
- raise ValueError("mask must contain 4 bytes")
-
- data_int = int.from_bytes(data, sys.byteorder)
- mask_repeated = mask * (len(data) // 4) + mask[: len(data) % 4]
- mask_int = int.from_bytes(mask_repeated, sys.byteorder)
- return (data_int ^ mask_int).to_bytes(len(data), sys.byteorder)
diff --git a/venv/lib/python3.11/site-packages/websockets/version.py b/venv/lib/python3.11/site-packages/websockets/version.py
deleted file mode 100644
index d1c9945..0000000
--- a/venv/lib/python3.11/site-packages/websockets/version.py
+++ /dev/null
@@ -1,82 +0,0 @@
-from __future__ import annotations
-
-import importlib.metadata
-
-
# The three public version identifiers computed by this module.
__all__ = ["tag", "version", "commit"]


# ========= =========== ===================
#           release     development
# ========= =========== ===================
# tag       X.Y         X.Y (upcoming)
# version   X.Y         X.Y.dev1+g5678cde
# commit    X.Y         5678cde
# ========= =========== ===================


# When tagging a release, set `released = True`.
# After tagging a release, set `released = False` and increment `tag`.

released = True

# On a tagged release all three identifiers are the same string; the
# `if not released` block later in this file recomputes `version` and
# `commit` for development builds.
tag = version = commit = "12.0"
-
-
if not released:  # pragma: no cover
    import pathlib
    import re
    import subprocess

    def get_version(tag: str) -> str:
        """
        Build the development version string for the current checkout.

        Prefers ``git describe`` output, then installed package metadata,
        then falls back to a fixed ``<tag>.dev0+gunknown`` placeholder.

        Args:
            tag: upcoming release tag, e.g. ``"12.0"``.

        """
        # Since setup.py executes the contents of src/websockets/version.py,
        # __file__ can point to either of these two files.
        file_path = pathlib.Path(__file__)
        root_dir = file_path.parents[0 if file_path.name == "setup.py" else 2]

        # Read version from git if available. This prevents reading stale
        # information from src/websockets.egg-info after building a sdist.
        try:
            description = subprocess.run(
                ["git", "describe", "--dirty", "--tags", "--long"],
                capture_output=True,
                cwd=root_dir,
                timeout=1,
                check=True,
                text=True,
            ).stdout.strip()
        # subprocess.run raises FileNotFoundError if git isn't on $PATH.
        except (
            FileNotFoundError,
            subprocess.CalledProcessError,
            subprocess.TimeoutExpired,
        ):
            pass
        else:
            # --long forces the "<tag>-<distance>-g<sha>[-dirty]" layout,
            # so the regex is expected to always match describe's output.
            description_re = r"[0-9.]+-([0-9]+)-(g[0-9a-f]{7,}(?:-dirty)?)"
            match = re.fullmatch(description_re, description)
            assert match is not None
            distance, remainder = match.groups()
            remainder = remainder.replace("-", ".") # required by PEP 440
            return f"{tag}.dev{distance}+{remainder}"

        # Read version from package metadata if it is installed.
        try:
            return importlib.metadata.version("websockets")
        except ImportError:
            pass

        # Avoid crashing if the development version cannot be determined.
        return f"{tag}.dev0+gunknown"

    version = get_version(tag)

    def get_commit(tag: str, version: str) -> str:
        """
        Extract the commit hash from a version produced by get_version.

        Args:
            tag: upcoming release tag (returned when no hash is available).
            version: version string in the ``<tag>.dev<N>+g<sha>`` form.

        """
        # Extract commit from version, falling back to tag if not available.
        version_re = r"[0-9.]+\.dev[0-9]+\+g([0-9a-f]{7,}|unknown)(?:\.dirty)?"
        match = re.fullmatch(version_re, version)
        assert match is not None
        (commit,) = match.groups()
        return tag if commit == "unknown" else commit

    commit = get_commit(tag, version)