From 12cf076118570eebbff08c6b3090e0d4798447a1 Mon Sep 17 00:00:00 2001 From: cyfraeviolae Date: Wed, 3 Apr 2024 03:17:55 -0400 Subject: no venv --- .../python3.11/site-packages/pygments/__init__.py | 82 - .../python3.11/site-packages/pygments/__main__.py | 17 - .../pygments/__pycache__/__init__.cpython-311.pyc | Bin 3800 -> 0 bytes .../pygments/__pycache__/__main__.cpython-311.pyc | Bin 802 -> 0 bytes .../pygments/__pycache__/cmdline.cpython-311.pyc | Bin 30095 -> 0 bytes .../pygments/__pycache__/console.cpython-311.pyc | Bin 3036 -> 0 bytes .../pygments/__pycache__/filter.cpython-311.pyc | Bin 3497 -> 0 bytes .../pygments/__pycache__/formatter.cpython-311.pyc | Bin 4788 -> 0 bytes .../pygments/__pycache__/lexer.cpython-311.pyc | Bin 42662 -> 0 bytes .../pygments/__pycache__/modeline.cpython-311.pyc | Bin 1716 -> 0 bytes .../pygments/__pycache__/plugin.cpython-311.pyc | Bin 3717 -> 0 bytes .../pygments/__pycache__/regexopt.cpython-311.pyc | Bin 5023 -> 0 bytes .../pygments/__pycache__/scanner.cpython-311.pyc | Bin 4878 -> 0 bytes .../pygments/__pycache__/sphinxext.cpython-311.pyc | Bin 13799 -> 0 bytes .../pygments/__pycache__/style.cpython-311.pyc | Bin 7448 -> 0 bytes .../pygments/__pycache__/token.cpython-311.pyc | Bin 7496 -> 0 bytes .../pygments/__pycache__/unistring.cpython-311.pyc | Bin 33830 -> 0 bytes .../pygments/__pycache__/util.cpython-311.pyc | Bin 15682 -> 0 bytes .../python3.11/site-packages/pygments/cmdline.py | 668 --- .../python3.11/site-packages/pygments/console.py | 70 - .../python3.11/site-packages/pygments/filter.py | 71 - .../site-packages/pygments/filters/__init__.py | 940 ---- .../filters/__pycache__/__init__.cpython-311.pyc | Bin 40049 -> 0 bytes .../python3.11/site-packages/pygments/formatter.py | 124 - .../site-packages/pygments/formatters/__init__.py | 158 - .../__pycache__/__init__.cpython-311.pyc | Bin 7737 -> 0 bytes .../__pycache__/_mapping.cpython-311.pyc | Bin 4215 -> 0 bytes .../formatters/__pycache__/bbcode.cpython-311.pyc | Bin 
4447 -> 0 bytes .../formatters/__pycache__/groff.cpython-311.pyc | Bin 7820 -> 0 bytes .../formatters/__pycache__/html.cpython-311.pyc | Bin 42707 -> 0 bytes .../formatters/__pycache__/img.cpython-311.pyc | Bin 30048 -> 0 bytes .../formatters/__pycache__/irc.cpython-311.pyc | Bin 6361 -> 0 bytes .../formatters/__pycache__/latex.cpython-311.pyc | Bin 21749 -> 0 bytes .../formatters/__pycache__/other.cpython-311.pyc | Bin 7577 -> 0 bytes .../__pycache__/pangomarkup.cpython-311.pyc | Bin 3157 -> 0 bytes .../formatters/__pycache__/rtf.cpython-311.pyc | Bin 6812 -> 0 bytes .../formatters/__pycache__/svg.cpython-311.pyc | Bin 9620 -> 0 bytes .../__pycache__/terminal.cpython-311.pyc | Bin 5987 -> 0 bytes .../__pycache__/terminal256.cpython-311.pyc | Bin 16365 -> 0 bytes .../site-packages/pygments/formatters/_mapping.py | 23 - .../site-packages/pygments/formatters/bbcode.py | 108 - .../site-packages/pygments/formatters/groff.py | 170 - .../site-packages/pygments/formatters/html.py | 990 ---- .../site-packages/pygments/formatters/img.py | 684 --- .../site-packages/pygments/formatters/irc.py | 154 - .../site-packages/pygments/formatters/latex.py | 521 -- .../site-packages/pygments/formatters/other.py | 161 - .../pygments/formatters/pangomarkup.py | 83 - .../site-packages/pygments/formatters/rtf.py | 146 - .../site-packages/pygments/formatters/svg.py | 188 - .../site-packages/pygments/formatters/terminal.py | 127 - .../pygments/formatters/terminal256.py | 338 -- .../lib/python3.11/site-packages/pygments/lexer.py | 959 ---- .../site-packages/pygments/lexers/__init__.py | 363 -- .../lexers/__pycache__/__init__.cpython-311.pyc | Bin 16321 -> 0 bytes .../__pycache__/_ada_builtins.cpython-311.pyc | Bin 1245 -> 0 bytes .../__pycache__/_asy_builtins.cpython-311.pyc | Bin 17662 -> 0 bytes .../__pycache__/_cl_builtins.cpython-311.pyc | Bin 11746 -> 0 bytes .../__pycache__/_cocoa_builtins.cpython-311.pyc | Bin 98063 -> 0 bytes .../__pycache__/_csound_builtins.cpython-311.pyc | Bin 
16459 -> 0 bytes .../__pycache__/_css_builtins.cpython-311.pyc | Bin 9395 -> 0 bytes .../__pycache__/_julia_builtins.cpython-311.pyc | Bin 8282 -> 0 bytes .../__pycache__/_lasso_builtins.cpython-311.pyc | Bin 76750 -> 0 bytes .../__pycache__/_lilypond_builtins.cpython-311.pyc | Bin 88650 -> 0 bytes .../__pycache__/_lua_builtins.cpython-311.pyc | Bin 9488 -> 0 bytes .../lexers/__pycache__/_mapping.cpython-311.pyc | Bin 64699 -> 0 bytes .../__pycache__/_mql_builtins.cpython-311.pyc | Bin 18019 -> 0 bytes .../__pycache__/_mysql_builtins.cpython-311.pyc | Bin 20410 -> 0 bytes .../__pycache__/_openedge_builtins.cpython-311.pyc | Bin 34103 -> 0 bytes .../__pycache__/_php_builtins.cpython-311.pyc | Bin 66412 -> 0 bytes .../__pycache__/_postgres_builtins.cpython-311.pyc | Bin 12353 -> 0 bytes .../__pycache__/_qlik_builtins.cpython-311.pyc | Bin 6445 -> 0 bytes .../__pycache__/_scheme_builtins.cpython-311.pyc | Bin 23207 -> 0 bytes .../__pycache__/_scilab_builtins.cpython-311.pyc | Bin 35748 -> 0 bytes .../_sourcemod_builtins.cpython-311.pyc | Bin 22536 -> 0 bytes .../__pycache__/_stan_builtins.cpython-311.pyc | Bin 9962 -> 0 bytes .../__pycache__/_stata_builtins.cpython-311.pyc | Bin 21248 -> 0 bytes .../__pycache__/_tsql_builtins.cpython-311.pyc | Bin 8900 -> 0 bytes .../__pycache__/_usd_builtins.cpython-311.pyc | Bin 1448 -> 0 bytes .../__pycache__/_vbscript_builtins.cpython-311.pyc | Bin 3003 -> 0 bytes .../__pycache__/_vim_builtins.cpython-311.pyc | Bin 30806 -> 0 bytes .../__pycache__/actionscript.cpython-311.pyc | Bin 11171 -> 0 bytes .../lexers/__pycache__/ada.cpython-311.pyc | Bin 5441 -> 0 bytes .../lexers/__pycache__/agile.cpython-311.pyc | Bin 1568 -> 0 bytes .../lexers/__pycache__/algebra.cpython-311.pyc | Bin 11474 -> 0 bytes .../lexers/__pycache__/ambient.cpython-311.pyc | Bin 3103 -> 0 bytes .../lexers/__pycache__/amdgpu.cpython-311.pyc | Bin 2188 -> 0 bytes .../lexers/__pycache__/ampl.cpython-311.pyc | Bin 4168 -> 0 bytes 
.../lexers/__pycache__/apdlexer.cpython-311.pyc | Bin 19148 -> 0 bytes .../lexers/__pycache__/apl.cpython-311.pyc | Bin 2529 -> 0 bytes .../lexers/__pycache__/archetype.cpython-311.pyc | Bin 9499 -> 0 bytes .../lexers/__pycache__/arrow.cpython-311.pyc | Bin 3652 -> 0 bytes .../lexers/__pycache__/arturo.cpython-311.pyc | Bin 9826 -> 0 bytes .../lexers/__pycache__/asc.cpython-311.pyc | Bin 2167 -> 0 bytes .../lexers/__pycache__/asm.cpython-311.pyc | Bin 36031 -> 0 bytes .../lexers/__pycache__/asn1.cpython-311.pyc | Bin 4655 -> 0 bytes .../lexers/__pycache__/automation.cpython-311.pyc | Bin 18404 -> 0 bytes .../lexers/__pycache__/bare.cpython-311.pyc | Bin 3063 -> 0 bytes .../lexers/__pycache__/basic.cpython-311.pyc | Bin 25365 -> 0 bytes .../lexers/__pycache__/bdd.cpython-311.pyc | Bin 2300 -> 0 bytes .../lexers/__pycache__/berry.cpython-311.pyc | Bin 3721 -> 0 bytes .../lexers/__pycache__/bibtex.cpython-311.pyc | Bin 5633 -> 0 bytes .../lexers/__pycache__/blueprint.cpython-311.pyc | Bin 5529 -> 0 bytes .../lexers/__pycache__/boa.cpython-311.pyc | Bin 3665 -> 0 bytes .../lexers/__pycache__/bqn.cpython-311.pyc | Bin 2381 -> 0 bytes .../lexers/__pycache__/business.cpython-311.pyc | Bin 21975 -> 0 bytes .../lexers/__pycache__/c_cpp.cpython-311.pyc | Bin 16485 -> 0 bytes .../lexers/__pycache__/c_like.cpython-311.pyc | Bin 26448 -> 0 bytes .../lexers/__pycache__/capnproto.cpython-311.pyc | Bin 2433 -> 0 bytes .../lexers/__pycache__/carbon.cpython-311.pyc | Bin 3632 -> 0 bytes .../lexers/__pycache__/cddl.cpython-311.pyc | Bin 4569 -> 0 bytes .../lexers/__pycache__/chapel.cpython-311.pyc | Bin 4397 -> 0 bytes .../lexers/__pycache__/clean.cpython-311.pyc | Bin 6282 -> 0 bytes .../lexers/__pycache__/comal.cpython-311.pyc | Bin 3351 -> 0 bytes .../lexers/__pycache__/compiled.cpython-311.pyc | Bin 2401 -> 0 bytes .../lexers/__pycache__/configs.cpython-311.pyc | Bin 45582 -> 0 bytes .../lexers/__pycache__/console.cpython-311.pyc | Bin 4370 -> 0 bytes 
.../lexers/__pycache__/cplint.cpython-311.pyc | Bin 1897 -> 0 bytes .../lexers/__pycache__/crystal.cpython-311.pyc | Bin 15302 -> 0 bytes .../lexers/__pycache__/csound.cpython-311.pyc | Bin 14637 -> 0 bytes .../lexers/__pycache__/css.cpython-311.pyc | Bin 22095 -> 0 bytes .../pygments/lexers/__pycache__/d.cpython-311.pyc | Bin 8306 -> 0 bytes .../lexers/__pycache__/dalvik.cpython-311.pyc | Bin 4821 -> 0 bytes .../lexers/__pycache__/data.cpython-311.pyc | Bin 23101 -> 0 bytes .../lexers/__pycache__/dax.cpython-311.pyc | Bin 6310 -> 0 bytes .../lexers/__pycache__/devicetree.cpython-311.pyc | Bin 4109 -> 0 bytes .../lexers/__pycache__/diff.cpython-311.pyc | Bin 5708 -> 0 bytes .../lexers/__pycache__/dns.cpython-311.pyc | Bin 3822 -> 0 bytes .../lexers/__pycache__/dotnet.cpython-311.pyc | Bin 34708 -> 0 bytes .../lexers/__pycache__/dsls.cpython-311.pyc | Bin 34488 -> 0 bytes .../lexers/__pycache__/dylan.cpython-311.pyc | Bin 9977 -> 0 bytes .../lexers/__pycache__/ecl.cpython-311.pyc | Bin 5761 -> 0 bytes .../lexers/__pycache__/eiffel.cpython-311.pyc | Bin 3054 -> 0 bytes .../lexers/__pycache__/elm.cpython-311.pyc | Bin 3232 -> 0 bytes .../lexers/__pycache__/elpi.cpython-311.pyc | Bin 7140 -> 0 bytes .../lexers/__pycache__/email.cpython-311.pyc | Bin 6065 -> 0 bytes .../lexers/__pycache__/erlang.cpython-311.pyc | Bin 21324 -> 0 bytes .../lexers/__pycache__/esoteric.cpython-311.pyc | Bin 10102 -> 0 bytes .../lexers/__pycache__/ezhil.cpython-311.pyc | Bin 4140 -> 0 bytes .../lexers/__pycache__/factor.cpython-311.pyc | Bin 17030 -> 0 bytes .../lexers/__pycache__/fantom.cpython-311.pyc | Bin 8445 -> 0 bytes .../lexers/__pycache__/felix.cpython-311.pyc | Bin 8370 -> 0 bytes .../lexers/__pycache__/fift.cpython-311.pyc | Bin 1961 -> 0 bytes .../lexers/__pycache__/floscript.cpython-311.pyc | Bin 3071 -> 0 bytes .../lexers/__pycache__/forth.cpython-311.pyc | Bin 5448 -> 0 bytes .../lexers/__pycache__/fortran.cpython-311.pyc | Bin 8932 -> 0 bytes 
.../lexers/__pycache__/foxpro.cpython-311.pyc | Bin 20716 -> 0 bytes .../lexers/__pycache__/freefem.cpython-311.pyc | Bin 12954 -> 0 bytes .../lexers/__pycache__/func.cpython-311.pyc | Bin 3502 -> 0 bytes .../lexers/__pycache__/functional.cpython-311.pyc | Bin 1246 -> 0 bytes .../lexers/__pycache__/futhark.cpython-311.pyc | Bin 3928 -> 0 bytes .../lexers/__pycache__/gcodelexer.cpython-311.pyc | Bin 1380 -> 0 bytes .../lexers/__pycache__/gdscript.cpython-311.pyc | Bin 7341 -> 0 bytes .../pygments/lexers/__pycache__/go.cpython-311.pyc | Bin 3490 -> 0 bytes .../__pycache__/grammar_notation.cpython-311.pyc | Bin 7819 -> 0 bytes .../lexers/__pycache__/graph.cpython-311.pyc | Bin 4295 -> 0 bytes .../lexers/__pycache__/graphics.cpython-311.pyc | Bin 30259 -> 0 bytes .../lexers/__pycache__/graphql.cpython-311.pyc | Bin 4668 -> 0 bytes .../lexers/__pycache__/graphviz.cpython-311.pyc | Bin 2314 -> 0 bytes .../lexers/__pycache__/gsql.cpython-311.pyc | Bin 4073 -> 0 bytes .../lexers/__pycache__/haskell.cpython-311.pyc | Bin 29499 -> 0 bytes .../lexers/__pycache__/haxe.cpython-311.pyc | Bin 23185 -> 0 bytes .../lexers/__pycache__/hdl.cpython-311.pyc | Bin 17153 -> 0 bytes .../lexers/__pycache__/hexdump.cpython-311.pyc | Bin 3797 -> 0 bytes .../lexers/__pycache__/html.cpython-311.pyc | Bin 19621 -> 0 bytes .../lexers/__pycache__/idl.cpython-311.pyc | Bin 12582 -> 0 bytes .../lexers/__pycache__/igor.cpython-311.pyc | Bin 25839 -> 0 bytes .../lexers/__pycache__/inferno.cpython-311.pyc | Bin 3398 -> 0 bytes .../lexers/__pycache__/installers.cpython-311.pyc | Bin 12913 -> 0 bytes .../lexers/__pycache__/int_fiction.cpython-311.pyc | Bin 48888 -> 0 bytes .../lexers/__pycache__/iolang.cpython-311.pyc | Bin 2228 -> 0 bytes .../pygments/lexers/__pycache__/j.cpython-311.pyc | Bin 4305 -> 0 bytes .../lexers/__pycache__/javascript.cpython-311.pyc | Bin 55789 -> 0 bytes .../lexers/__pycache__/jmespath.cpython-311.pyc | Bin 2583 -> 0 bytes .../lexers/__pycache__/jslt.cpython-311.pyc | Bin 
3880 -> 0 bytes .../lexers/__pycache__/jsonnet.cpython-311.pyc | Bin 5178 -> 0 bytes .../lexers/__pycache__/jsx.cpython-311.pyc | Bin 2556 -> 0 bytes .../lexers/__pycache__/julia.cpython-311.pyc | Bin 11399 -> 0 bytes .../lexers/__pycache__/jvm.cpython-311.pyc | Bin 62899 -> 0 bytes .../lexers/__pycache__/kuin.cpython-311.pyc | Bin 10894 -> 0 bytes .../lexers/__pycache__/kusto.cpython-311.pyc | Bin 3019 -> 0 bytes .../lexers/__pycache__/ldap.cpython-311.pyc | Bin 6802 -> 0 bytes .../lexers/__pycache__/lean.cpython-311.pyc | Bin 4256 -> 0 bytes .../lexers/__pycache__/lilypond.cpython-311.pyc | Bin 7770 -> 0 bytes .../lexers/__pycache__/lisp.cpython-311.pyc | Bin 112065 -> 0 bytes .../lexers/__pycache__/macaulay2.cpython-311.pyc | Bin 22387 -> 0 bytes .../lexers/__pycache__/make.cpython-311.pyc | Bin 7122 -> 0 bytes .../lexers/__pycache__/markup.cpython-311.pyc | Bin 59704 -> 0 bytes .../lexers/__pycache__/math.cpython-311.pyc | Bin 1217 -> 0 bytes .../lexers/__pycache__/matlab.cpython-311.pyc | Bin 56538 -> 0 bytes .../lexers/__pycache__/maxima.cpython-311.pyc | Bin 3295 -> 0 bytes .../lexers/__pycache__/meson.cpython-311.pyc | Bin 3705 -> 0 bytes .../lexers/__pycache__/mime.cpython-311.pyc | Bin 11141 -> 0 bytes .../lexers/__pycache__/minecraft.cpython-311.pyc | Bin 10481 -> 0 bytes .../lexers/__pycache__/mips.cpython-311.pyc | Bin 3526 -> 0 bytes .../pygments/lexers/__pycache__/ml.cpython-311.pyc | Bin 24533 -> 0 bytes .../lexers/__pycache__/modeling.cpython-311.pyc | Bin 11931 -> 0 bytes .../lexers/__pycache__/modula2.cpython-311.pyc | Bin 27075 -> 0 bytes .../lexers/__pycache__/monte.cpython-311.pyc | Bin 5122 -> 0 bytes .../lexers/__pycache__/mosel.cpython-311.pyc | Bin 6890 -> 0 bytes .../lexers/__pycache__/ncl.cpython-311.pyc | Bin 46068 -> 0 bytes .../lexers/__pycache__/nimrod.cpython-311.pyc | Bin 6235 -> 0 bytes .../lexers/__pycache__/nit.cpython-311.pyc | Bin 2857 -> 0 bytes .../lexers/__pycache__/nix.cpython-311.pyc | Bin 5382 -> 0 bytes 
.../lexers/__pycache__/oberon.cpython-311.pyc | Bin 3915 -> 0 bytes .../lexers/__pycache__/objective.cpython-311.pyc | Bin 19880 -> 0 bytes .../lexers/__pycache__/ooc.cpython-311.pyc | Bin 3187 -> 0 bytes .../lexers/__pycache__/openscad.cpython-311.pyc | Bin 3774 -> 0 bytes .../lexers/__pycache__/other.cpython-311.pyc | Bin 3031 -> 0 bytes .../lexers/__pycache__/parasail.cpython-311.pyc | Bin 2925 -> 0 bytes .../lexers/__pycache__/parsers.cpython-311.pyc | Bin 27244 -> 0 bytes .../lexers/__pycache__/pascal.cpython-311.pyc | Bin 24892 -> 0 bytes .../lexers/__pycache__/pawn.cpython-311.pyc | Bin 7535 -> 0 bytes .../lexers/__pycache__/perl.cpython-311.pyc | Bin 39228 -> 0 bytes .../lexers/__pycache__/phix.cpython-311.pyc | Bin 18494 -> 0 bytes .../lexers/__pycache__/php.cpython-311.pyc | Bin 14686 -> 0 bytes .../lexers/__pycache__/pointless.cpython-311.pyc | Bin 2429 -> 0 bytes .../lexers/__pycache__/pony.cpython-311.pyc | Bin 3455 -> 0 bytes .../lexers/__pycache__/praat.cpython-311.pyc | Bin 10712 -> 0 bytes .../lexers/__pycache__/procfile.cpython-311.pyc | Bin 1720 -> 0 bytes .../lexers/__pycache__/prolog.cpython-311.pyc | Bin 10311 -> 0 bytes .../lexers/__pycache__/promql.cpython-311.pyc | Bin 3513 -> 0 bytes .../lexers/__pycache__/prql.cpython-311.pyc | Bin 8257 -> 0 bytes .../lexers/__pycache__/ptx.cpython-311.pyc | Bin 3941 -> 0 bytes .../lexers/__pycache__/python.cpython-311.pyc | Bin 43268 -> 0 bytes .../pygments/lexers/__pycache__/q.cpython-311.pyc | Bin 5843 -> 0 bytes .../lexers/__pycache__/qlik.cpython-311.pyc | Bin 3611 -> 0 bytes .../lexers/__pycache__/qvt.cpython-311.pyc | Bin 5470 -> 0 bytes .../pygments/lexers/__pycache__/r.cpython-311.pyc | Bin 6388 -> 0 bytes .../lexers/__pycache__/rdf.cpython-311.pyc | Bin 12091 -> 0 bytes .../lexers/__pycache__/rebol.cpython-311.pyc | Bin 18951 -> 0 bytes .../lexers/__pycache__/resource.cpython-311.pyc | Bin 3517 -> 0 bytes .../lexers/__pycache__/ride.cpython-311.pyc | Bin 4593 -> 0 bytes 
.../lexers/__pycache__/rita.cpython-311.pyc | Bin 1619 -> 0 bytes .../lexers/__pycache__/rnc.cpython-311.pyc | Bin 2055 -> 0 bytes .../lexers/__pycache__/roboconf.cpython-311.pyc | Bin 2449 -> 0 bytes .../__pycache__/robotframework.cpython-311.pyc | Bin 32606 -> 0 bytes .../lexers/__pycache__/ruby.cpython-311.pyc | Bin 22906 -> 0 bytes .../lexers/__pycache__/rust.cpython-311.pyc | Bin 7123 -> 0 bytes .../lexers/__pycache__/sas.cpython-311.pyc | Bin 7189 -> 0 bytes .../lexers/__pycache__/savi.cpython-311.pyc | Bin 3902 -> 0 bytes .../lexers/__pycache__/scdoc.cpython-311.pyc | Bin 3075 -> 0 bytes .../lexers/__pycache__/scripting.cpython-311.pyc | Bin 60381 -> 0 bytes .../lexers/__pycache__/sgf.cpython-311.pyc | Bin 2238 -> 0 bytes .../lexers/__pycache__/shell.cpython-311.pyc | Bin 38703 -> 0 bytes .../lexers/__pycache__/sieve.cpython-311.pyc | Bin 2744 -> 0 bytes .../lexers/__pycache__/slash.cpython-311.pyc | Bin 8439 -> 0 bytes .../lexers/__pycache__/smalltalk.cpython-311.pyc | Bin 6763 -> 0 bytes .../lexers/__pycache__/smithy.cpython-311.pyc | Bin 3132 -> 0 bytes .../lexers/__pycache__/smv.cpython-311.pyc | Bin 2896 -> 0 bytes .../lexers/__pycache__/snobol.cpython-311.pyc | Bin 2572 -> 0 bytes .../lexers/__pycache__/solidity.cpython-311.pyc | Bin 3528 -> 0 bytes .../lexers/__pycache__/sophia.cpython-311.pyc | Bin 3712 -> 0 bytes .../lexers/__pycache__/special.cpython-311.pyc | Bin 5912 -> 0 bytes .../lexers/__pycache__/spice.cpython-311.pyc | Bin 3233 -> 0 bytes .../lexers/__pycache__/sql.cpython-311.pyc | Bin 41388 -> 0 bytes .../lexers/__pycache__/srcinfo.cpython-311.pyc | Bin 2090 -> 0 bytes .../lexers/__pycache__/stata.cpython-311.pyc | Bin 4849 -> 0 bytes .../__pycache__/supercollider.cpython-311.pyc | Bin 4064 -> 0 bytes .../lexers/__pycache__/tal.cpython-311.pyc | Bin 2923 -> 0 bytes .../lexers/__pycache__/tcl.cpython-311.pyc | Bin 5495 -> 0 bytes .../lexers/__pycache__/teal.cpython-311.pyc | Bin 3685 -> 0 bytes 
.../lexers/__pycache__/templates.cpython-311.pyc | Bin 90635 -> 0 bytes .../lexers/__pycache__/teraterm.cpython-311.pyc | Bin 5706 -> 0 bytes .../lexers/__pycache__/testing.cpython-311.pyc | Bin 10166 -> 0 bytes .../lexers/__pycache__/text.cpython-311.pyc | Bin 1845 -> 0 bytes .../lexers/__pycache__/textedit.cpython-311.pyc | Bin 8892 -> 0 bytes .../lexers/__pycache__/textfmts.cpython-311.pyc | Bin 16464 -> 0 bytes .../lexers/__pycache__/theorem.cpython-311.pyc | Bin 14122 -> 0 bytes .../lexers/__pycache__/thingsdb.cpython-311.pyc | Bin 4229 -> 0 bytes .../lexers/__pycache__/tlb.cpython-311.pyc | Bin 1886 -> 0 bytes .../lexers/__pycache__/tls.cpython-311.pyc | Bin 2060 -> 0 bytes .../lexers/__pycache__/tnt.cpython-311.pyc | Bin 14887 -> 0 bytes .../__pycache__/trafficscript.cpython-311.pyc | Bin 1901 -> 0 bytes .../lexers/__pycache__/typoscript.cpython-311.pyc | Bin 7389 -> 0 bytes .../lexers/__pycache__/ul4.cpython-311.pyc | Bin 8112 -> 0 bytes .../lexers/__pycache__/unicon.cpython-311.pyc | Bin 12498 -> 0 bytes .../lexers/__pycache__/urbi.cpython-311.pyc | Bin 6032 -> 0 bytes .../lexers/__pycache__/usd.cpython-311.pyc | Bin 4417 -> 0 bytes .../lexers/__pycache__/varnish.cpython-311.pyc | Bin 7130 -> 0 bytes .../__pycache__/verification.cpython-311.pyc | Bin 4072 -> 0 bytes .../lexers/__pycache__/verifpal.cpython-311.pyc | Bin 3160 -> 0 bytes .../lexers/__pycache__/vip.cpython-311.pyc | Bin 5962 -> 0 bytes .../lexers/__pycache__/vyper.cpython-311.pyc | Bin 4889 -> 0 bytes .../lexers/__pycache__/web.cpython-311.pyc | Bin 1620 -> 0 bytes .../lexers/__pycache__/webassembly.cpython-311.pyc | Bin 5693 -> 0 bytes .../lexers/__pycache__/webidl.cpython-311.pyc | Bin 8601 -> 0 bytes .../lexers/__pycache__/webmisc.cpython-311.pyc | Bin 44936 -> 0 bytes .../lexers/__pycache__/wgsl.cpython-311.pyc | Bin 11108 -> 0 bytes .../lexers/__pycache__/whiley.cpython-311.pyc | Bin 3596 -> 0 bytes .../lexers/__pycache__/wowtoc.cpython-311.pyc | Bin 3432 -> 0 bytes 
.../lexers/__pycache__/wren.cpython-311.pyc | Bin 3119 -> 0 bytes .../lexers/__pycache__/x10.cpython-311.pyc | Bin 2357 -> 0 bytes .../lexers/__pycache__/xorg.cpython-311.pyc | Bin 1448 -> 0 bytes .../lexers/__pycache__/yang.cpython-311.pyc | Bin 4189 -> 0 bytes .../lexers/__pycache__/yara.cpython-311.pyc | Bin 2779 -> 0 bytes .../lexers/__pycache__/zig.cpython-311.pyc | Bin 3973 -> 0 bytes .../site-packages/pygments/lexers/_ada_builtins.py | 103 - .../site-packages/pygments/lexers/_asy_builtins.py | 1644 ------ .../site-packages/pygments/lexers/_cl_builtins.py | 231 - .../pygments/lexers/_cocoa_builtins.py | 75 - .../pygments/lexers/_csound_builtins.py | 1780 ------- .../site-packages/pygments/lexers/_css_builtins.py | 558 -- .../pygments/lexers/_julia_builtins.py | 411 -- .../pygments/lexers/_lasso_builtins.py | 5326 -------------------- .../pygments/lexers/_lilypond_builtins.py | 4932 ------------------ .../site-packages/pygments/lexers/_lua_builtins.py | 285 -- .../site-packages/pygments/lexers/_mapping.py | 580 --- .../site-packages/pygments/lexers/_mql_builtins.py | 1171 ----- .../pygments/lexers/_mysql_builtins.py | 1335 ----- .../pygments/lexers/_openedge_builtins.py | 2600 ---------- .../site-packages/pygments/lexers/_php_builtins.py | 3325 ------------ .../pygments/lexers/_postgres_builtins.py | 739 --- .../pygments/lexers/_qlik_builtins.py | 666 --- .../pygments/lexers/_scheme_builtins.py | 1609 ------ .../pygments/lexers/_scilab_builtins.py | 3093 ------------ .../pygments/lexers/_sourcemod_builtins.py | 1151 ----- .../pygments/lexers/_stan_builtins.py | 648 --- .../pygments/lexers/_stata_builtins.py | 457 -- .../pygments/lexers/_tsql_builtins.py | 1003 ---- .../site-packages/pygments/lexers/_usd_builtins.py | 112 - .../pygments/lexers/_vbscript_builtins.py | 279 - .../site-packages/pygments/lexers/_vim_builtins.py | 1938 ------- .../site-packages/pygments/lexers/actionscript.py | 245 - .../site-packages/pygments/lexers/ada.py | 144 - 
.../site-packages/pygments/lexers/agile.py | 23 - .../site-packages/pygments/lexers/algebra.py | 302 -- .../site-packages/pygments/lexers/ambient.py | 76 - .../site-packages/pygments/lexers/amdgpu.py | 54 - .../site-packages/pygments/lexers/ampl.py | 88 - .../site-packages/pygments/lexers/apdlexer.py | 592 --- .../site-packages/pygments/lexers/apl.py | 104 - .../site-packages/pygments/lexers/archetype.py | 319 -- .../site-packages/pygments/lexers/arrow.py | 117 - .../site-packages/pygments/lexers/arturo.py | 250 - .../site-packages/pygments/lexers/asc.py | 55 - .../site-packages/pygments/lexers/asm.py | 1037 ---- .../site-packages/pygments/lexers/asn1.py | 179 - .../site-packages/pygments/lexers/automation.py | 381 -- .../site-packages/pygments/lexers/bare.py | 102 - .../site-packages/pygments/lexers/basic.py | 665 --- .../site-packages/pygments/lexers/bdd.py | 58 - .../site-packages/pygments/lexers/berry.py | 99 - .../site-packages/pygments/lexers/bibtex.py | 159 - .../site-packages/pygments/lexers/blueprint.py | 174 - .../site-packages/pygments/lexers/boa.py | 97 - .../site-packages/pygments/lexers/bqn.py | 110 - .../site-packages/pygments/lexers/business.py | 626 --- .../site-packages/pygments/lexers/c_cpp.py | 411 -- .../site-packages/pygments/lexers/c_like.py | 666 --- .../site-packages/pygments/lexers/capnproto.py | 75 - .../site-packages/pygments/lexers/carbon.py | 96 - .../site-packages/pygments/lexers/cddl.py | 173 - .../site-packages/pygments/lexers/chapel.py | 140 - .../site-packages/pygments/lexers/clean.py | 179 - .../site-packages/pygments/lexers/comal.py | 80 - .../site-packages/pygments/lexers/compiled.py | 34 - .../site-packages/pygments/lexers/configs.py | 1435 ------ .../site-packages/pygments/lexers/console.py | 114 - .../site-packages/pygments/lexers/cplint.py | 44 - .../site-packages/pygments/lexers/crystal.py | 365 -- .../site-packages/pygments/lexers/csound.py | 468 -- .../site-packages/pygments/lexers/css.py | 602 --- 
.../python3.11/site-packages/pygments/lexers/d.py | 258 - .../site-packages/pygments/lexers/dalvik.py | 127 - .../site-packages/pygments/lexers/data.py | 767 --- .../site-packages/pygments/lexers/dax.py | 136 - .../site-packages/pygments/lexers/devicetree.py | 109 - .../site-packages/pygments/lexers/diff.py | 168 - .../site-packages/pygments/lexers/dns.py | 106 - .../site-packages/pygments/lexers/dotnet.py | 841 ---- .../site-packages/pygments/lexers/dsls.py | 982 ---- .../site-packages/pygments/lexers/dylan.py | 281 -- .../site-packages/pygments/lexers/ecl.py | 145 - .../site-packages/pygments/lexers/eiffel.py | 69 - .../site-packages/pygments/lexers/elm.py | 124 - .../site-packages/pygments/lexers/elpi.py | 173 - .../site-packages/pygments/lexers/email.py | 132 - .../site-packages/pygments/lexers/erlang.py | 528 -- .../site-packages/pygments/lexers/esoteric.py | 301 -- .../site-packages/pygments/lexers/ezhil.py | 77 - .../site-packages/pygments/lexers/factor.py | 364 -- .../site-packages/pygments/lexers/fantom.py | 251 - .../site-packages/pygments/lexers/felix.py | 276 - .../site-packages/pygments/lexers/fift.py | 67 - .../site-packages/pygments/lexers/floscript.py | 82 - .../site-packages/pygments/lexers/forth.py | 179 - .../site-packages/pygments/lexers/fortran.py | 213 - .../site-packages/pygments/lexers/foxpro.py | 427 -- .../site-packages/pygments/lexers/freefem.py | 894 ---- .../site-packages/pygments/lexers/func.py | 108 - .../site-packages/pygments/lexers/functional.py | 20 - .../site-packages/pygments/lexers/futhark.py | 106 - .../site-packages/pygments/lexers/gcodelexer.py | 35 - .../site-packages/pygments/lexers/gdscript.py | 188 - .../python3.11/site-packages/pygments/lexers/go.py | 98 - .../pygments/lexers/grammar_notation.py | 265 - .../site-packages/pygments/lexers/graph.py | 109 - .../site-packages/pygments/lexers/graphics.py | 798 --- .../site-packages/pygments/lexers/graphql.py | 177 - .../site-packages/pygments/lexers/graphviz.py | 59 - 
.../site-packages/pygments/lexers/gsql.py | 104 - .../site-packages/pygments/lexers/haskell.py | 871 ---- .../site-packages/pygments/lexers/haxe.py | 937 ---- .../site-packages/pygments/lexers/hdl.py | 465 -- .../site-packages/pygments/lexers/hexdump.py | 102 - .../site-packages/pygments/lexers/html.py | 623 --- .../site-packages/pygments/lexers/idl.py | 285 -- .../site-packages/pygments/lexers/igor.py | 436 -- .../site-packages/pygments/lexers/inferno.py | 96 - .../site-packages/pygments/lexers/installers.py | 327 -- .../site-packages/pygments/lexers/int_fiction.py | 1382 ----- .../site-packages/pygments/lexers/iolang.py | 62 - .../python3.11/site-packages/pygments/lexers/j.py | 152 - .../site-packages/pygments/lexers/javascript.py | 1588 ------ .../site-packages/pygments/lexers/jmespath.py | 68 - .../site-packages/pygments/lexers/jslt.py | 95 - .../site-packages/pygments/lexers/jsonnet.py | 168 - .../site-packages/pygments/lexers/jsx.py | 76 - .../site-packages/pygments/lexers/julia.py | 294 -- .../site-packages/pygments/lexers/jvm.py | 1820 ------- .../site-packages/pygments/lexers/kuin.py | 333 -- .../site-packages/pygments/lexers/kusto.py | 94 - .../site-packages/pygments/lexers/ldap.py | 157 - .../site-packages/pygments/lexers/lean.py | 122 - .../site-packages/pygments/lexers/lilypond.py | 226 - .../site-packages/pygments/lexers/lisp.py | 2848 ----------- .../site-packages/pygments/lexers/macaulay2.py | 1755 ------- .../site-packages/pygments/lexers/make.py | 211 - .../site-packages/pygments/lexers/markup.py | 1550 ------ .../site-packages/pygments/lexers/math.py | 20 - .../site-packages/pygments/lexers/matlab.py | 3308 ------------ .../site-packages/pygments/lexers/maxima.py | 85 - .../site-packages/pygments/lexers/meson.py | 140 - .../site-packages/pygments/lexers/mime.py | 210 - .../site-packages/pygments/lexers/minecraft.py | 394 -- .../site-packages/pygments/lexers/mips.py | 128 - .../python3.11/site-packages/pygments/lexers/ml.py | 960 ---- 
.../site-packages/pygments/lexers/modeling.py | 369 -- .../site-packages/pygments/lexers/modula2.py | 1580 ------ .../site-packages/pygments/lexers/monte.py | 204 - .../site-packages/pygments/lexers/mosel.py | 447 -- .../site-packages/pygments/lexers/ncl.py | 893 ---- .../site-packages/pygments/lexers/nimrod.py | 200 - .../site-packages/pygments/lexers/nit.py | 64 - .../site-packages/pygments/lexers/nix.py | 145 - .../site-packages/pygments/lexers/oberon.py | 120 - .../site-packages/pygments/lexers/objective.py | 505 -- .../site-packages/pygments/lexers/ooc.py | 85 - .../site-packages/pygments/lexers/openscad.py | 97 - .../site-packages/pygments/lexers/other.py | 40 - .../site-packages/pygments/lexers/parasail.py | 79 - .../site-packages/pygments/lexers/parsers.py | 801 --- .../site-packages/pygments/lexers/pascal.py | 641 --- .../site-packages/pygments/lexers/pawn.py | 202 - .../site-packages/pygments/lexers/perl.py | 733 --- .../site-packages/pygments/lexers/phix.py | 364 -- .../site-packages/pygments/lexers/php.py | 335 -- .../site-packages/pygments/lexers/pointless.py | 71 - .../site-packages/pygments/lexers/pony.py | 93 - .../site-packages/pygments/lexers/praat.py | 304 -- .../site-packages/pygments/lexers/procfile.py | 42 - .../site-packages/pygments/lexers/prolog.py | 309 -- .../site-packages/pygments/lexers/promql.py | 175 - .../site-packages/pygments/lexers/prql.py | 252 - .../site-packages/pygments/lexers/ptx.py | 120 - .../site-packages/pygments/lexers/python.py | 1198 ----- .../python3.11/site-packages/pygments/lexers/q.py | 188 - .../site-packages/pygments/lexers/qlik.py | 117 - .../site-packages/pygments/lexers/qvt.py | 151 - .../python3.11/site-packages/pygments/lexers/r.py | 190 - .../site-packages/pygments/lexers/rdf.py | 466 -- .../site-packages/pygments/lexers/rebol.py | 419 -- .../site-packages/pygments/lexers/resource.py | 84 - .../site-packages/pygments/lexers/ride.py | 139 - .../site-packages/pygments/lexers/rita.py | 43 - 
.../site-packages/pygments/lexers/rnc.py | 67 - .../site-packages/pygments/lexers/roboconf.py | 81 - .../pygments/lexers/robotframework.py | 552 -- .../site-packages/pygments/lexers/ruby.py | 516 -- .../site-packages/pygments/lexers/rust.py | 223 - .../site-packages/pygments/lexers/sas.py | 227 - .../site-packages/pygments/lexers/savi.py | 170 - .../site-packages/pygments/lexers/scdoc.py | 86 - .../site-packages/pygments/lexers/scripting.py | 1286 ----- .../site-packages/pygments/lexers/sgf.py | 60 - .../site-packages/pygments/lexers/shell.py | 920 ---- .../site-packages/pygments/lexers/sieve.py | 78 - .../site-packages/pygments/lexers/slash.py | 184 - .../site-packages/pygments/lexers/smalltalk.py | 196 - .../site-packages/pygments/lexers/smithy.py | 78 - .../site-packages/pygments/lexers/smv.py | 78 - .../site-packages/pygments/lexers/snobol.py | 82 - .../site-packages/pygments/lexers/solidity.py | 87 - .../site-packages/pygments/lexers/sophia.py | 103 - .../site-packages/pygments/lexers/special.py | 116 - .../site-packages/pygments/lexers/spice.py | 70 - .../site-packages/pygments/lexers/sql.py | 1027 ---- .../site-packages/pygments/lexers/srcinfo.py | 62 - .../site-packages/pygments/lexers/stata.py | 171 - .../site-packages/pygments/lexers/supercollider.py | 95 - .../site-packages/pygments/lexers/tal.py | 77 - .../site-packages/pygments/lexers/tcl.py | 149 - .../site-packages/pygments/lexers/teal.py | 89 - .../site-packages/pygments/lexers/templates.py | 2296 --------- .../site-packages/pygments/lexers/teraterm.py | 326 -- .../site-packages/pygments/lexers/testing.py | 210 - .../site-packages/pygments/lexers/text.py | 26 - .../site-packages/pygments/lexers/textedit.py | 202 - .../site-packages/pygments/lexers/textfmts.py | 436 -- .../site-packages/pygments/lexers/theorem.py | 391 -- .../site-packages/pygments/lexers/thingsdb.py | 116 - .../site-packages/pygments/lexers/tlb.py | 57 - .../site-packages/pygments/lexers/tls.py | 55 - 
.../site-packages/pygments/lexers/tnt.py | 271 - .../site-packages/pygments/lexers/trafficscript.py | 51 - .../site-packages/pygments/lexers/typoscript.py | 217 - .../site-packages/pygments/lexers/ul4.py | 267 - .../site-packages/pygments/lexers/unicon.py | 411 -- .../site-packages/pygments/lexers/urbi.py | 145 - .../site-packages/pygments/lexers/usd.py | 90 - .../site-packages/pygments/lexers/varnish.py | 189 - .../site-packages/pygments/lexers/verification.py | 114 - .../site-packages/pygments/lexers/verifpal.py | 66 - .../site-packages/pygments/lexers/vip.py | 152 - .../site-packages/pygments/lexers/vyper.py | 141 - .../site-packages/pygments/lexers/web.py | 23 - .../site-packages/pygments/lexers/webassembly.py | 120 - .../site-packages/pygments/lexers/webidl.py | 299 -- .../site-packages/pygments/lexers/webmisc.py | 1010 ---- .../site-packages/pygments/lexers/wgsl.py | 407 -- .../site-packages/pygments/lexers/whiley.py | 116 - .../site-packages/pygments/lexers/wowtoc.py | 120 - .../site-packages/pygments/lexers/wren.py | 99 - .../site-packages/pygments/lexers/x10.py | 67 - .../site-packages/pygments/lexers/xorg.py | 37 - .../site-packages/pygments/lexers/yang.py | 104 - .../site-packages/pygments/lexers/yara.py | 70 - .../site-packages/pygments/lexers/zig.py | 124 - .../python3.11/site-packages/pygments/modeline.py | 43 - .../python3.11/site-packages/pygments/plugin.py | 88 - .../python3.11/site-packages/pygments/regexopt.py | 91 - .../python3.11/site-packages/pygments/scanner.py | 104 - .../python3.11/site-packages/pygments/sphinxext.py | 239 - .../lib/python3.11/site-packages/pygments/style.py | 203 - .../site-packages/pygments/styles/__init__.py | 61 - .../styles/__pycache__/__init__.cpython-311.pyc | Bin 3056 -> 0 bytes .../styles/__pycache__/_mapping.cpython-311.pyc | Bin 3607 -> 0 bytes .../styles/__pycache__/abap.cpython-311.pyc | Bin 1205 -> 0 bytes .../styles/__pycache__/algol.cpython-311.pyc | Bin 2570 -> 0 bytes 
.../styles/__pycache__/algol_nu.cpython-311.pyc | Bin 2585 -> 0 bytes .../styles/__pycache__/arduino.cpython-311.pyc | Bin 3753 -> 0 bytes .../styles/__pycache__/autumn.cpython-311.pyc | Bin 2712 -> 0 bytes .../styles/__pycache__/borland.cpython-311.pyc | Bin 2196 -> 0 bytes .../pygments/styles/__pycache__/bw.cpython-311.pyc | Bin 1850 -> 0 bytes .../styles/__pycache__/colorful.cpython-311.pyc | Bin 3364 -> 0 bytes .../styles/__pycache__/default.cpython-311.pyc | Bin 3005 -> 0 bytes .../styles/__pycache__/dracula.cpython-311.pyc | Bin 3018 -> 0 bytes .../styles/__pycache__/emacs.cpython-311.pyc | Bin 3033 -> 0 bytes .../styles/__pycache__/friendly.cpython-311.pyc | Bin 3129 -> 0 bytes .../__pycache__/friendly_grayscale.cpython-311.pyc | Bin 3339 -> 0 bytes .../styles/__pycache__/fruity.cpython-311.pyc | Bin 1962 -> 0 bytes .../styles/__pycache__/gh_dark.cpython-311.pyc | Bin 3825 -> 0 bytes .../styles/__pycache__/gruvbox.cpython-311.pyc | Bin 4012 -> 0 bytes .../styles/__pycache__/igor.cpython-311.pyc | Bin 1198 -> 0 bytes .../styles/__pycache__/inkpot.cpython-311.pyc | Bin 2842 -> 0 bytes .../styles/__pycache__/lightbulb.cpython-311.pyc | Bin 4370 -> 0 bytes .../styles/__pycache__/lilypond.cpython-311.pyc | Bin 2767 -> 0 bytes .../styles/__pycache__/lovelace.cpython-311.pyc | Bin 3899 -> 0 bytes .../styles/__pycache__/manni.cpython-311.pyc | Bin 3264 -> 0 bytes .../styles/__pycache__/material.cpython-311.pyc | Bin 4363 -> 0 bytes .../styles/__pycache__/monokai.cpython-311.pyc | Bin 4308 -> 0 bytes .../styles/__pycache__/murphy.cpython-311.pyc | Bin 3314 -> 0 bytes .../styles/__pycache__/native.cpython-311.pyc | Bin 2827 -> 0 bytes .../styles/__pycache__/nord.cpython-311.pyc | Bin 5113 -> 0 bytes .../styles/__pycache__/onedark.cpython-311.pyc | Bin 2213 -> 0 bytes .../__pycache__/paraiso_dark.cpython-311.pyc | Bin 4643 -> 0 bytes .../__pycache__/paraiso_light.cpython-311.pyc | Bin 4649 -> 0 bytes .../styles/__pycache__/pastie.cpython-311.pyc | Bin 3175 -> 0 bytes 
.../styles/__pycache__/perldoc.cpython-311.pyc | Bin 2884 -> 0 bytes .../__pycache__/rainbow_dash.cpython-311.pyc | Bin 4241 -> 0 bytes .../styles/__pycache__/rrt.cpython-311.pyc | Bin 1450 -> 0 bytes .../styles/__pycache__/sas.cpython-311.pyc | Bin 1888 -> 0 bytes .../styles/__pycache__/solarized.cpython-311.pyc | Bin 5904 -> 0 bytes .../styles/__pycache__/staroffice.cpython-311.pyc | Bin 1218 -> 0 bytes .../styles/__pycache__/stata_dark.cpython-311.pyc | Bin 1763 -> 0 bytes .../styles/__pycache__/stata_light.cpython-311.pyc | Bin 1771 -> 0 bytes .../styles/__pycache__/tango.cpython-311.pyc | Bin 5639 -> 0 bytes .../styles/__pycache__/trac.cpython-311.pyc | Bin 2471 -> 0 bytes .../styles/__pycache__/vim.cpython-311.pyc | Bin 2398 -> 0 bytes .../pygments/styles/__pycache__/vs.cpython-311.pyc | Bin 1511 -> 0 bytes .../styles/__pycache__/xcode.cpython-311.pyc | Bin 1846 -> 0 bytes .../styles/__pycache__/zenburn.cpython-311.pyc | Bin 3203 -> 0 bytes .../site-packages/pygments/styles/_mapping.py | 53 - .../site-packages/pygments/styles/abap.py | 32 - .../site-packages/pygments/styles/algol.py | 65 - .../site-packages/pygments/styles/algol_nu.py | 65 - .../site-packages/pygments/styles/arduino.py | 100 - .../site-packages/pygments/styles/autumn.py | 67 - .../site-packages/pygments/styles/borland.py | 53 - .../python3.11/site-packages/pygments/styles/bw.py | 52 - .../site-packages/pygments/styles/colorful.py | 83 - .../site-packages/pygments/styles/default.py | 76 - .../site-packages/pygments/styles/dracula.py | 90 - .../site-packages/pygments/styles/emacs.py | 75 - .../site-packages/pygments/styles/friendly.py | 76 - .../pygments/styles/friendly_grayscale.py | 80 - .../site-packages/pygments/styles/fruity.py | 47 - .../site-packages/pygments/styles/gh_dark.py | 113 - .../site-packages/pygments/styles/gruvbox.py | 118 - .../site-packages/pygments/styles/igor.py | 32 - .../site-packages/pygments/styles/inkpot.py | 72 - .../site-packages/pygments/styles/lightbulb.py | 110 
- .../site-packages/pygments/styles/lilypond.py | 62 - .../site-packages/pygments/styles/lovelace.py | 100 - .../site-packages/pygments/styles/manni.py | 79 - .../site-packages/pygments/styles/material.py | 124 - .../site-packages/pygments/styles/monokai.py | 112 - .../site-packages/pygments/styles/murphy.py | 82 - .../site-packages/pygments/styles/native.py | 70 - .../site-packages/pygments/styles/nord.py | 156 - .../site-packages/pygments/styles/onedark.py | 63 - .../site-packages/pygments/styles/paraiso_dark.py | 124 - .../site-packages/pygments/styles/paraiso_light.py | 124 - .../site-packages/pygments/styles/pastie.py | 78 - .../site-packages/pygments/styles/perldoc.py | 73 - .../site-packages/pygments/styles/rainbow_dash.py | 95 - .../site-packages/pygments/styles/rrt.py | 39 - .../site-packages/pygments/styles/sas.py | 46 - .../site-packages/pygments/styles/solarized.py | 144 - .../site-packages/pygments/styles/staroffice.py | 31 - .../site-packages/pygments/styles/stata_dark.py | 42 - .../site-packages/pygments/styles/stata_light.py | 42 - .../site-packages/pygments/styles/tango.py | 143 - .../site-packages/pygments/styles/trac.py | 66 - .../site-packages/pygments/styles/vim.py | 67 - .../python3.11/site-packages/pygments/styles/vs.py | 41 - .../site-packages/pygments/styles/xcode.py | 53 - .../site-packages/pygments/styles/zenburn.py | 83 - .../lib/python3.11/site-packages/pygments/token.py | 214 - .../python3.11/site-packages/pygments/unistring.py | 153 - venv/lib/python3.11/site-packages/pygments/util.py | 330 -- 642 files changed, 119810 deletions(-) delete mode 100644 venv/lib/python3.11/site-packages/pygments/__init__.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/__main__.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/__pycache__/__init__.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/__pycache__/__main__.cpython-311.pyc delete mode 100644 
venv/lib/python3.11/site-packages/pygments/__pycache__/cmdline.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/__pycache__/console.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/__pycache__/filter.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/__pycache__/formatter.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/__pycache__/lexer.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/__pycache__/modeline.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/__pycache__/plugin.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/__pycache__/regexopt.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/__pycache__/scanner.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/__pycache__/sphinxext.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/__pycache__/style.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/__pycache__/token.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/__pycache__/unistring.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/__pycache__/util.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/cmdline.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/console.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/filter.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/filters/__init__.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/filters/__pycache__/__init__.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatter.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/__init__.py delete mode 100644 
venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/__init__.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/_mapping.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/bbcode.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/groff.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/html.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/img.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/irc.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/latex.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/other.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/pangomarkup.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/rtf.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/svg.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/terminal.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/terminal256.cpython-311.pyc delete mode 100755 venv/lib/python3.11/site-packages/pygments/formatters/_mapping.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/bbcode.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/groff.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/html.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/img.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/irc.py delete mode 
100644 venv/lib/python3.11/site-packages/pygments/formatters/latex.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/other.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/pangomarkup.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/rtf.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/svg.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/terminal.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/formatters/terminal256.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexer.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__init__.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/__init__.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_ada_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_asy_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_cl_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_cocoa_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_csound_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_css_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_julia_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_lasso_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_lilypond_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_lua_builtins.cpython-311.pyc delete mode 100644 
venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_mapping.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_mql_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_mysql_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_openedge_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_php_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_postgres_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_qlik_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_scheme_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_scilab_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_sourcemod_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_stan_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_stata_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_tsql_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_usd_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_vbscript_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/_vim_builtins.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/actionscript.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ada.cpython-311.pyc 
delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/agile.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/algebra.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ambient.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/amdgpu.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ampl.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/apdlexer.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/apl.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/archetype.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/arrow.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/arturo.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/asc.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/asm.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/asn1.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/automation.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/bare.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/basic.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/bdd.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/berry.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/bibtex.cpython-311.pyc delete mode 100644 
venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/blueprint.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/boa.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/bqn.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/business.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/c_cpp.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/c_like.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/capnproto.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/carbon.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/cddl.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/chapel.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/clean.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/comal.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/compiled.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/configs.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/console.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/cplint.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/crystal.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/csound.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/css.cpython-311.pyc delete mode 100644 
venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/d.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dalvik.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/data.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dax.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/devicetree.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/diff.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dns.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dotnet.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dsls.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/dylan.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ecl.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/eiffel.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/elm.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/elpi.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/email.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/erlang.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/esoteric.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ezhil.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/factor.cpython-311.pyc delete mode 100644 
venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/fantom.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/felix.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/fift.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/floscript.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/forth.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/fortran.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/foxpro.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/freefem.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/func.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/functional.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/futhark.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/gcodelexer.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/gdscript.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/go.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/grammar_notation.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/graph.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/graphics.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/graphql.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/graphviz.cpython-311.pyc delete mode 100644 
venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/gsql.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/haskell.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/haxe.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/hdl.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/hexdump.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/html.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/idl.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/igor.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/inferno.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/installers.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/int_fiction.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/iolang.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/j.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/javascript.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jmespath.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jslt.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jsonnet.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jsx.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/julia.cpython-311.pyc delete mode 100644 
venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/jvm.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/kuin.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/kusto.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ldap.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/lean.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/lilypond.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/lisp.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/macaulay2.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/make.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/markup.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/math.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/matlab.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/maxima.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/meson.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/mime.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/minecraft.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/mips.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ml.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/modeling.cpython-311.pyc delete mode 100644 
venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/modula2.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/monte.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/mosel.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ncl.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/nimrod.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/nit.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/nix.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/oberon.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/objective.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ooc.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/openscad.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/other.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/parasail.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/parsers.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/pascal.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/pawn.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/perl.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/phix.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/php.cpython-311.pyc delete mode 100644 
venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/pointless.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/pony.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/praat.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/procfile.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/prolog.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/promql.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/prql.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ptx.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/python.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/q.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/qlik.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/qvt.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/r.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rdf.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rebol.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/resource.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ride.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rita.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rnc.cpython-311.pyc delete mode 100644 
venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/roboconf.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/robotframework.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ruby.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/rust.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sas.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/savi.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/scdoc.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/scripting.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sgf.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/shell.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sieve.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/slash.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/smalltalk.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/smithy.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/smv.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/snobol.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/solidity.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sophia.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/special.cpython-311.pyc delete mode 100644 
venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/spice.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/sql.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/srcinfo.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/stata.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/supercollider.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tal.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tcl.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/teal.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/templates.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/teraterm.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/testing.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/text.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/textedit.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/textfmts.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/theorem.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/thingsdb.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tlb.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tls.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/tnt.cpython-311.pyc delete mode 100644 
venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/trafficscript.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/typoscript.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/ul4.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/unicon.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/urbi.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/usd.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/varnish.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/verification.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/verifpal.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/vip.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/vyper.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/web.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/webassembly.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/webidl.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/webmisc.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/wgsl.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/whiley.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/wowtoc.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/wren.cpython-311.pyc delete mode 100644 
venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/x10.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/xorg.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/yang.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/yara.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/__pycache__/zig.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_ada_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_asy_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_cl_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_cocoa_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_csound_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_css_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_julia_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_lasso_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_lilypond_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_lua_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_mapping.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_mql_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_mysql_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_openedge_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_php_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_postgres_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_qlik_builtins.py delete mode 100644 
venv/lib/python3.11/site-packages/pygments/lexers/_scheme_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_scilab_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_sourcemod_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_stan_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_stata_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_tsql_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_usd_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_vbscript_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/_vim_builtins.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/actionscript.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/ada.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/agile.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/algebra.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/ambient.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/amdgpu.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/ampl.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/apdlexer.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/apl.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/archetype.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/arrow.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/arturo.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/asc.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/asm.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/asn1.py delete mode 100644 
venv/lib/python3.11/site-packages/pygments/lexers/automation.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/bare.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/basic.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/bdd.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/berry.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/bibtex.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/blueprint.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/boa.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/bqn.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/business.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/c_cpp.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/c_like.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/capnproto.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/carbon.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/cddl.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/chapel.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/clean.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/comal.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/compiled.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/configs.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/console.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/cplint.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/crystal.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/csound.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/css.py delete mode 100644 
venv/lib/python3.11/site-packages/pygments/lexers/d.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/dalvik.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/data.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/dax.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/devicetree.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/diff.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/dns.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/dotnet.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/dsls.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/dylan.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/ecl.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/eiffel.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/elm.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/elpi.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/email.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/erlang.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/esoteric.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/ezhil.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/factor.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/fantom.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/felix.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/fift.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/floscript.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/forth.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/fortran.py delete mode 100644 
venv/lib/python3.11/site-packages/pygments/lexers/foxpro.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/freefem.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/func.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/functional.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/futhark.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/gcodelexer.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/gdscript.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/go.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/grammar_notation.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/graph.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/graphics.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/graphql.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/graphviz.py delete mode 100755 venv/lib/python3.11/site-packages/pygments/lexers/gsql.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/haskell.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/haxe.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/hdl.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/hexdump.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/html.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/idl.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/igor.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/inferno.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/installers.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/int_fiction.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/iolang.py delete mode 100644 
venv/lib/python3.11/site-packages/pygments/lexers/j.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/javascript.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/jmespath.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/jslt.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/jsonnet.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/jsx.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/julia.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/jvm.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/kuin.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/kusto.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/ldap.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/lean.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/lilypond.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/lisp.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/macaulay2.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/make.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/markup.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/math.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/matlab.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/maxima.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/meson.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/mime.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/minecraft.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/mips.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/ml.py delete mode 100644 
venv/lib/python3.11/site-packages/pygments/lexers/modeling.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/modula2.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/monte.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/mosel.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/ncl.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/nimrod.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/nit.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/nix.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/oberon.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/objective.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/ooc.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/openscad.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/other.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/parasail.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/parsers.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/pascal.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/pawn.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/perl.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/phix.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/php.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/pointless.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/pony.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/praat.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/procfile.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/prolog.py delete mode 100644 
venv/lib/python3.11/site-packages/pygments/lexers/promql.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/prql.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/ptx.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/python.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/q.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/qlik.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/qvt.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/r.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/rdf.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/rebol.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/resource.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/ride.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/rita.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/rnc.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/roboconf.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/robotframework.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/ruby.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/rust.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/sas.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/savi.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/scdoc.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/scripting.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/sgf.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/shell.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/sieve.py delete mode 100644 
venv/lib/python3.11/site-packages/pygments/lexers/slash.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/smalltalk.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/smithy.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/smv.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/snobol.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/solidity.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/sophia.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/special.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/spice.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/sql.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/srcinfo.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/stata.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/supercollider.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/tal.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/tcl.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/teal.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/templates.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/teraterm.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/testing.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/text.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/textedit.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/textfmts.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/theorem.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/thingsdb.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/tlb.py delete mode 100644 
venv/lib/python3.11/site-packages/pygments/lexers/tls.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/tnt.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/trafficscript.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/typoscript.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/ul4.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/unicon.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/urbi.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/usd.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/varnish.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/verification.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/verifpal.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/vip.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/vyper.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/web.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/webassembly.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/webidl.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/webmisc.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/wgsl.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/whiley.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/wowtoc.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/wren.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/x10.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/xorg.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/yang.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/lexers/yara.py delete mode 100644 
venv/lib/python3.11/site-packages/pygments/lexers/zig.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/modeline.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/plugin.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/regexopt.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/scanner.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/sphinxext.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/style.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__init__.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/__init__.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/_mapping.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/abap.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/algol.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/algol_nu.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/arduino.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/autumn.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/borland.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/bw.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/colorful.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/default.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/dracula.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/emacs.cpython-311.pyc delete mode 100644 
venv/lib/python3.11/site-packages/pygments/styles/__pycache__/friendly.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/friendly_grayscale.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/fruity.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/gh_dark.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/gruvbox.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/igor.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/inkpot.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/lightbulb.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/lilypond.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/lovelace.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/manni.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/material.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/monokai.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/murphy.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/native.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/nord.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/onedark.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/paraiso_dark.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/paraiso_light.cpython-311.pyc delete 
mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/pastie.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/perldoc.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/rainbow_dash.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/rrt.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/sas.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/solarized.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/staroffice.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/stata_dark.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/stata_light.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/tango.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/trac.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/vim.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/vs.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/xcode.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/__pycache__/zenburn.cpython-311.pyc delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/_mapping.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/abap.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/algol.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/algol_nu.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/arduino.py delete mode 100644 
venv/lib/python3.11/site-packages/pygments/styles/autumn.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/borland.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/bw.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/colorful.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/default.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/dracula.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/emacs.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/friendly.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/friendly_grayscale.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/fruity.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/gh_dark.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/gruvbox.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/igor.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/inkpot.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/lightbulb.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/lilypond.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/lovelace.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/manni.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/material.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/monokai.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/murphy.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/native.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/nord.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/onedark.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/paraiso_dark.py delete mode 
100644 venv/lib/python3.11/site-packages/pygments/styles/paraiso_light.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/pastie.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/perldoc.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/rainbow_dash.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/rrt.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/sas.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/solarized.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/staroffice.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/stata_dark.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/stata_light.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/tango.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/trac.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/vim.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/vs.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/xcode.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/styles/zenburn.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/token.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/unistring.py delete mode 100644 venv/lib/python3.11/site-packages/pygments/util.py (limited to 'venv/lib/python3.11/site-packages/pygments') diff --git a/venv/lib/python3.11/site-packages/pygments/__init__.py b/venv/lib/python3.11/site-packages/pygments/__init__.py deleted file mode 100644 index 6b77c46..0000000 --- a/venv/lib/python3.11/site-packages/pygments/__init__.py +++ /dev/null @@ -1,82 +0,0 @@ -""" - Pygments - ~~~~~~~~ - - Pygments is a syntax highlighting package written in Python. 
- - It is a generic syntax highlighter for general use in all kinds of software - such as forum systems, wikis or other applications that need to prettify - source code. Highlights are: - - * a wide range of common languages and markup formats is supported - * special attention is paid to details, increasing quality by a fair amount - * support for new languages and formats are added easily - * a number of output formats, presently HTML, LaTeX, RTF, SVG, all image - formats that PIL supports, and ANSI sequences - * it is usable as a command-line tool and as a library - * ... and it highlights even Brainfuck! - - The `Pygments master branch`_ is installable with ``easy_install Pygments==dev``. - - .. _Pygments master branch: - https://github.com/pygments/pygments/archive/master.zip#egg=Pygments-dev - - :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" -from io import StringIO, BytesIO - -__version__ = '2.17.2' -__docformat__ = 'restructuredtext' - -__all__ = ['lex', 'format', 'highlight'] - - -def lex(code, lexer): - """ - Lex `code` with the `lexer` (must be a `Lexer` instance) - and return an iterable of tokens. Currently, this only calls - `lexer.get_tokens()`. - """ - try: - return lexer.get_tokens(code) - except TypeError: - # Heuristic to catch a common mistake. - from pygments.lexer import RegexLexer - if isinstance(lexer, type) and issubclass(lexer, RegexLexer): - raise TypeError('lex() argument must be a lexer instance, ' - 'not a class') - raise - - -def format(tokens, formatter, outfile=None): # pylint: disable=redefined-builtin - """ - Format ``tokens`` (an iterable of tokens) with the formatter ``formatter`` - (a `Formatter` instance). - - If ``outfile`` is given and a valid file object (an object with a - ``write`` method), the result will be written to it, otherwise it - is returned as a string. 
- """ - try: - if not outfile: - realoutfile = getattr(formatter, 'encoding', None) and BytesIO() or StringIO() - formatter.format(tokens, realoutfile) - return realoutfile.getvalue() - else: - formatter.format(tokens, outfile) - except TypeError: - # Heuristic to catch a common mistake. - from pygments.formatter import Formatter - if isinstance(formatter, type) and issubclass(formatter, Formatter): - raise TypeError('format() argument must be a formatter instance, ' - 'not a class') - raise - - -def highlight(code, lexer, formatter, outfile=None): - """ - This is the most high-level highlighting function. It combines `lex` and - `format` in one function. - """ - return format(lex(code, lexer), formatter, outfile) diff --git a/venv/lib/python3.11/site-packages/pygments/__main__.py b/venv/lib/python3.11/site-packages/pygments/__main__.py deleted file mode 100644 index 5eb2c74..0000000 --- a/venv/lib/python3.11/site-packages/pygments/__main__.py +++ /dev/null @@ -1,17 +0,0 @@ -""" - pygments.__main__ - ~~~~~~~~~~~~~~~~~ - - Main entry point for ``python -m pygments``. - - :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. 
-""" - -import sys -import pygments.cmdline - -try: - sys.exit(pygments.cmdline.main(sys.argv)) -except KeyboardInterrupt: - sys.exit(1) diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index 19a66d8..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/__main__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/__main__.cpython-311.pyc deleted file mode 100644 index 9e17973..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/__pycache__/__main__.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/cmdline.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/cmdline.cpython-311.pyc deleted file mode 100644 index c8701eb..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/__pycache__/cmdline.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/console.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/console.cpython-311.pyc deleted file mode 100644 index f6053f7..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/__pycache__/console.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/filter.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/filter.cpython-311.pyc deleted file mode 100644 index 9875b8a..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/__pycache__/filter.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/formatter.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/formatter.cpython-311.pyc 
deleted file mode 100644 index ed4b5ff..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/__pycache__/formatter.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/lexer.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/lexer.cpython-311.pyc deleted file mode 100644 index 4bf40ea..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/__pycache__/lexer.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/modeline.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/modeline.cpython-311.pyc deleted file mode 100644 index 8115386..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/__pycache__/modeline.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/plugin.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/plugin.cpython-311.pyc deleted file mode 100644 index 33a25b3..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/__pycache__/plugin.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/regexopt.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/regexopt.cpython-311.pyc deleted file mode 100644 index d8be4e4..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/__pycache__/regexopt.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/scanner.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/scanner.cpython-311.pyc deleted file mode 100644 index 2d356e9..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/__pycache__/scanner.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/sphinxext.cpython-311.pyc 
b/venv/lib/python3.11/site-packages/pygments/__pycache__/sphinxext.cpython-311.pyc deleted file mode 100644 index f8dbe70..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/__pycache__/sphinxext.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/style.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/style.cpython-311.pyc deleted file mode 100644 index e4fd479..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/__pycache__/style.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/token.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/token.cpython-311.pyc deleted file mode 100644 index 1617029..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/__pycache__/token.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/unistring.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/unistring.cpython-311.pyc deleted file mode 100644 index 1334a03..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/__pycache__/unistring.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/__pycache__/util.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/__pycache__/util.cpython-311.pyc deleted file mode 100644 index 3f1a0e1..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/__pycache__/util.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/cmdline.py b/venv/lib/python3.11/site-packages/pygments/cmdline.py deleted file mode 100644 index 435231e..0000000 --- a/venv/lib/python3.11/site-packages/pygments/cmdline.py +++ /dev/null @@ -1,668 +0,0 @@ -""" - pygments.cmdline - ~~~~~~~~~~~~~~~~ - - Command line interface. - - :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. 
- :license: BSD, see LICENSE for details. -""" - -import os -import sys -import shutil -import argparse -from textwrap import dedent - -from pygments import __version__, highlight -from pygments.util import ClassNotFound, OptionError, docstring_headline, \ - guess_decode, guess_decode_from_terminal, terminal_encoding, \ - UnclosingTextIOWrapper -from pygments.lexers import get_all_lexers, get_lexer_by_name, guess_lexer, \ - load_lexer_from_file, get_lexer_for_filename, find_lexer_class_for_filename -from pygments.lexers.special import TextLexer -from pygments.formatters.latex import LatexEmbeddedLexer, LatexFormatter -from pygments.formatters import get_all_formatters, get_formatter_by_name, \ - load_formatter_from_file, get_formatter_for_filename, find_formatter_class -from pygments.formatters.terminal import TerminalFormatter -from pygments.formatters.terminal256 import Terminal256Formatter, TerminalTrueColorFormatter -from pygments.filters import get_all_filters, find_filter_class -from pygments.styles import get_all_styles, get_style_by_name - - -def _parse_options(o_strs): - opts = {} - if not o_strs: - return opts - for o_str in o_strs: - if not o_str.strip(): - continue - o_args = o_str.split(',') - for o_arg in o_args: - o_arg = o_arg.strip() - try: - o_key, o_val = o_arg.split('=', 1) - o_key = o_key.strip() - o_val = o_val.strip() - except ValueError: - opts[o_arg] = True - else: - opts[o_key] = o_val - return opts - - -def _parse_filters(f_strs): - filters = [] - if not f_strs: - return filters - for f_str in f_strs: - if ':' in f_str: - fname, fopts = f_str.split(':', 1) - filters.append((fname, _parse_options([fopts]))) - else: - filters.append((f_str, {})) - return filters - - -def _print_help(what, name): - try: - if what == 'lexer': - cls = get_lexer_by_name(name) - print("Help on the %s lexer:" % cls.name) - print(dedent(cls.__doc__)) - elif what == 'formatter': - cls = find_formatter_class(name) - print("Help on the %s formatter:" % cls.name) - 
print(dedent(cls.__doc__)) - elif what == 'filter': - cls = find_filter_class(name) - print("Help on the %s filter:" % name) - print(dedent(cls.__doc__)) - return 0 - except (AttributeError, ValueError): - print("%s not found!" % what, file=sys.stderr) - return 1 - - -def _print_list(what): - if what == 'lexer': - print() - print("Lexers:") - print("~~~~~~~") - - info = [] - for fullname, names, exts, _ in get_all_lexers(): - tup = (', '.join(names)+':', fullname, - exts and '(filenames ' + ', '.join(exts) + ')' or '') - info.append(tup) - info.sort() - for i in info: - print(('* %s\n %s %s') % i) - - elif what == 'formatter': - print() - print("Formatters:") - print("~~~~~~~~~~~") - - info = [] - for cls in get_all_formatters(): - doc = docstring_headline(cls) - tup = (', '.join(cls.aliases) + ':', doc, cls.filenames and - '(filenames ' + ', '.join(cls.filenames) + ')' or '') - info.append(tup) - info.sort() - for i in info: - print(('* %s\n %s %s') % i) - - elif what == 'filter': - print() - print("Filters:") - print("~~~~~~~~") - - for name in get_all_filters(): - cls = find_filter_class(name) - print("* " + name + ':') - print(" %s" % docstring_headline(cls)) - - elif what == 'style': - print() - print("Styles:") - print("~~~~~~~") - - for name in get_all_styles(): - cls = get_style_by_name(name) - print("* " + name + ':') - print(" %s" % docstring_headline(cls)) - - -def _print_list_as_json(requested_items): - import json - result = {} - if 'lexer' in requested_items: - info = {} - for fullname, names, filenames, mimetypes in get_all_lexers(): - info[fullname] = { - 'aliases': names, - 'filenames': filenames, - 'mimetypes': mimetypes - } - result['lexers'] = info - - if 'formatter' in requested_items: - info = {} - for cls in get_all_formatters(): - doc = docstring_headline(cls) - info[cls.name] = { - 'aliases': cls.aliases, - 'filenames': cls.filenames, - 'doc': doc - } - result['formatters'] = info - - if 'filter' in requested_items: - info = {} - for name 
in get_all_filters(): - cls = find_filter_class(name) - info[name] = { - 'doc': docstring_headline(cls) - } - result['filters'] = info - - if 'style' in requested_items: - info = {} - for name in get_all_styles(): - cls = get_style_by_name(name) - info[name] = { - 'doc': docstring_headline(cls) - } - result['styles'] = info - - json.dump(result, sys.stdout) - -def main_inner(parser, argns): - if argns.help: - parser.print_help() - return 0 - - if argns.V: - print('Pygments version %s, (c) 2006-2023 by Georg Brandl, Matthäus ' - 'Chajdas and contributors.' % __version__) - return 0 - - def is_only_option(opt): - return not any(v for (k, v) in vars(argns).items() if k != opt) - - # handle ``pygmentize -L`` - if argns.L is not None: - arg_set = set() - for k, v in vars(argns).items(): - if v: - arg_set.add(k) - - arg_set.discard('L') - arg_set.discard('json') - - if arg_set: - parser.print_help(sys.stderr) - return 2 - - # print version - if not argns.json: - main(['', '-V']) - allowed_types = {'lexer', 'formatter', 'filter', 'style'} - largs = [arg.rstrip('s') for arg in argns.L] - if any(arg not in allowed_types for arg in largs): - parser.print_help(sys.stderr) - return 0 - if not largs: - largs = allowed_types - if not argns.json: - for arg in largs: - _print_list(arg) - else: - _print_list_as_json(largs) - return 0 - - # handle ``pygmentize -H`` - if argns.H: - if not is_only_option('H'): - parser.print_help(sys.stderr) - return 2 - what, name = argns.H - if what not in ('lexer', 'formatter', 'filter'): - parser.print_help(sys.stderr) - return 2 - return _print_help(what, name) - - # parse -O options - parsed_opts = _parse_options(argns.O or []) - - # parse -P options - for p_opt in argns.P or []: - try: - name, value = p_opt.split('=', 1) - except ValueError: - parsed_opts[p_opt] = True - else: - parsed_opts[name] = value - - # encodings - inencoding = parsed_opts.get('inencoding', parsed_opts.get('encoding')) - outencoding = parsed_opts.get('outencoding', 
parsed_opts.get('encoding')) - - # handle ``pygmentize -N`` - if argns.N: - lexer = find_lexer_class_for_filename(argns.N) - if lexer is None: - lexer = TextLexer - - print(lexer.aliases[0]) - return 0 - - # handle ``pygmentize -C`` - if argns.C: - inp = sys.stdin.buffer.read() - try: - lexer = guess_lexer(inp, inencoding=inencoding) - except ClassNotFound: - lexer = TextLexer - - print(lexer.aliases[0]) - return 0 - - # handle ``pygmentize -S`` - S_opt = argns.S - a_opt = argns.a - if S_opt is not None: - f_opt = argns.f - if not f_opt: - parser.print_help(sys.stderr) - return 2 - if argns.l or argns.INPUTFILE: - parser.print_help(sys.stderr) - return 2 - - try: - parsed_opts['style'] = S_opt - fmter = get_formatter_by_name(f_opt, **parsed_opts) - except ClassNotFound as err: - print(err, file=sys.stderr) - return 1 - - print(fmter.get_style_defs(a_opt or '')) - return 0 - - # if no -S is given, -a is not allowed - if argns.a is not None: - parser.print_help(sys.stderr) - return 2 - - # parse -F options - F_opts = _parse_filters(argns.F or []) - - # -x: allow custom (eXternal) lexers and formatters - allow_custom_lexer_formatter = bool(argns.x) - - # select lexer - lexer = None - - # given by name? 
- lexername = argns.l - if lexername: - # custom lexer, located relative to user's cwd - if allow_custom_lexer_formatter and '.py' in lexername: - try: - filename = None - name = None - if ':' in lexername: - filename, name = lexername.rsplit(':', 1) - - if '.py' in name: - # This can happen on Windows: If the lexername is - # C:\lexer.py -- return to normal load path in that case - name = None - - if filename and name: - lexer = load_lexer_from_file(filename, name, - **parsed_opts) - else: - lexer = load_lexer_from_file(lexername, **parsed_opts) - except ClassNotFound as err: - print('Error:', err, file=sys.stderr) - return 1 - else: - try: - lexer = get_lexer_by_name(lexername, **parsed_opts) - except (OptionError, ClassNotFound) as err: - print('Error:', err, file=sys.stderr) - return 1 - - # read input code - code = None - - if argns.INPUTFILE: - if argns.s: - print('Error: -s option not usable when input file specified', - file=sys.stderr) - return 2 - - infn = argns.INPUTFILE - try: - with open(infn, 'rb') as infp: - code = infp.read() - except Exception as err: - print('Error: cannot read infile:', err, file=sys.stderr) - return 1 - if not inencoding: - code, inencoding = guess_decode(code) - - # do we have to guess the lexer? 
- if not lexer: - try: - lexer = get_lexer_for_filename(infn, code, **parsed_opts) - except ClassNotFound as err: - if argns.g: - try: - lexer = guess_lexer(code, **parsed_opts) - except ClassNotFound: - lexer = TextLexer(**parsed_opts) - else: - print('Error:', err, file=sys.stderr) - return 1 - except OptionError as err: - print('Error:', err, file=sys.stderr) - return 1 - - elif not argns.s: # treat stdin as full file (-s support is later) - # read code from terminal, always in binary mode since we want to - # decode ourselves and be tolerant with it - code = sys.stdin.buffer.read() # use .buffer to get a binary stream - if not inencoding: - code, inencoding = guess_decode_from_terminal(code, sys.stdin) - # else the lexer will do the decoding - if not lexer: - try: - lexer = guess_lexer(code, **parsed_opts) - except ClassNotFound: - lexer = TextLexer(**parsed_opts) - - else: # -s option needs a lexer with -l - if not lexer: - print('Error: when using -s a lexer has to be selected with -l', - file=sys.stderr) - return 2 - - # process filters - for fname, fopts in F_opts: - try: - lexer.add_filter(fname, **fopts) - except ClassNotFound as err: - print('Error:', err, file=sys.stderr) - return 1 - - # select formatter - outfn = argns.o - fmter = argns.f - if fmter: - # custom formatter, located relative to user's cwd - if allow_custom_lexer_formatter and '.py' in fmter: - try: - filename = None - name = None - if ':' in fmter: - # Same logic as above for custom lexer - filename, name = fmter.rsplit(':', 1) - - if '.py' in name: - name = None - - if filename and name: - fmter = load_formatter_from_file(filename, name, - **parsed_opts) - else: - fmter = load_formatter_from_file(fmter, **parsed_opts) - except ClassNotFound as err: - print('Error:', err, file=sys.stderr) - return 1 - else: - try: - fmter = get_formatter_by_name(fmter, **parsed_opts) - except (OptionError, ClassNotFound) as err: - print('Error:', err, file=sys.stderr) - return 1 - - if outfn: - if not 
fmter: - try: - fmter = get_formatter_for_filename(outfn, **parsed_opts) - except (OptionError, ClassNotFound) as err: - print('Error:', err, file=sys.stderr) - return 1 - try: - outfile = open(outfn, 'wb') - except Exception as err: - print('Error: cannot open outfile:', err, file=sys.stderr) - return 1 - else: - if not fmter: - if os.environ.get('COLORTERM','') in ('truecolor', '24bit'): - fmter = TerminalTrueColorFormatter(**parsed_opts) - elif '256' in os.environ.get('TERM', ''): - fmter = Terminal256Formatter(**parsed_opts) - else: - fmter = TerminalFormatter(**parsed_opts) - outfile = sys.stdout.buffer - - # determine output encoding if not explicitly selected - if not outencoding: - if outfn: - # output file? use lexer encoding for now (can still be None) - fmter.encoding = inencoding - else: - # else use terminal encoding - fmter.encoding = terminal_encoding(sys.stdout) - - # provide coloring under Windows, if possible - if not outfn and sys.platform in ('win32', 'cygwin') and \ - fmter.name in ('Terminal', 'Terminal256'): # pragma: no cover - # unfortunately colorama doesn't support binary streams on Py3 - outfile = UnclosingTextIOWrapper(outfile, encoding=fmter.encoding) - fmter.encoding = None - try: - import colorama.initialise - except ImportError: - pass - else: - outfile = colorama.initialise.wrap_stream( - outfile, convert=None, strip=None, autoreset=False, wrap=True) - - # When using the LaTeX formatter and the option `escapeinside` is - # specified, we need a special lexer which collects escaped text - # before running the chosen language lexer. - escapeinside = parsed_opts.get('escapeinside', '') - if len(escapeinside) == 2 and isinstance(fmter, LatexFormatter): - left = escapeinside[0] - right = escapeinside[1] - lexer = LatexEmbeddedLexer(left, right, lexer) - - # ... and do it! - if not argns.s: - # process whole input as per normal... 
- try: - highlight(code, lexer, fmter, outfile) - finally: - if outfn: - outfile.close() - return 0 - else: - # line by line processing of stdin (eg: for 'tail -f')... - try: - while 1: - line = sys.stdin.buffer.readline() - if not line: - break - if not inencoding: - line = guess_decode_from_terminal(line, sys.stdin)[0] - highlight(line, lexer, fmter, outfile) - if hasattr(outfile, 'flush'): - outfile.flush() - return 0 - except KeyboardInterrupt: # pragma: no cover - return 0 - finally: - if outfn: - outfile.close() - - -class HelpFormatter(argparse.HelpFormatter): - def __init__(self, prog, indent_increment=2, max_help_position=16, width=None): - if width is None: - try: - width = shutil.get_terminal_size().columns - 2 - except Exception: - pass - argparse.HelpFormatter.__init__(self, prog, indent_increment, - max_help_position, width) - - -def main(args=sys.argv): - """ - Main command line entry point. - """ - desc = "Highlight an input file and write the result to an output file." - parser = argparse.ArgumentParser(description=desc, add_help=False, - formatter_class=HelpFormatter) - - operation = parser.add_argument_group('Main operation') - lexersel = operation.add_mutually_exclusive_group() - lexersel.add_argument( - '-l', metavar='LEXER', - help='Specify the lexer to use. (Query names with -L.) If not ' - 'given and -g is not present, the lexer is guessed from the filename.') - lexersel.add_argument( - '-g', action='store_true', - help='Guess the lexer from the file contents, or pass through ' - 'as plain text if nothing can be guessed.') - operation.add_argument( - '-F', metavar='FILTER[:options]', action='append', - help='Add a filter to the token stream. (Query names with -L.) ' - 'Filter options are given after a colon if necessary.') - operation.add_argument( - '-f', metavar='FORMATTER', - help='Specify the formatter to use. (Query names with -L.) 
' - 'If not given, the formatter is guessed from the output filename, ' - 'and defaults to the terminal formatter if the output is to the ' - 'terminal or an unknown file extension.') - operation.add_argument( - '-O', metavar='OPTION=value[,OPTION=value,...]', action='append', - help='Give options to the lexer and formatter as a comma-separated ' - 'list of key-value pairs. ' - 'Example: `-O bg=light,python=cool`.') - operation.add_argument( - '-P', metavar='OPTION=value', action='append', - help='Give a single option to the lexer and formatter - with this ' - 'you can pass options whose value contains commas and equal signs. ' - 'Example: `-P "heading=Pygments, the Python highlighter"`.') - operation.add_argument( - '-o', metavar='OUTPUTFILE', - help='Where to write the output. Defaults to standard output.') - - operation.add_argument( - 'INPUTFILE', nargs='?', - help='Where to read the input. Defaults to standard input.') - - flags = parser.add_argument_group('Operation flags') - flags.add_argument( - '-v', action='store_true', - help='Print a detailed traceback on unhandled exceptions, which ' - 'is useful for debugging and bug reports.') - flags.add_argument( - '-s', action='store_true', - help='Process lines one at a time until EOF, rather than waiting to ' - 'process the entire file. This only works for stdin, only for lexers ' - 'with no line-spanning constructs, and is intended for streaming ' - 'input such as you get from `tail -f`. ' - 'Example usage: `tail -f sql.log | pygmentize -s -l sql`.') - flags.add_argument( - '-x', action='store_true', - help='Allow custom lexers and formatters to be loaded from a .py file ' - 'relative to the current working directory. For example, ' - '`-l ./customlexer.py -x`. By default, this option expects a file ' - 'with a class named CustomLexer or CustomFormatter; you can also ' - 'specify your own class name with a colon (`-l ./lexer.py:MyLexer`). 
' - 'Users should be very careful not to use this option with untrusted ' - 'files, because it will import and run them.') - flags.add_argument('--json', help='Output as JSON. This can ' - 'be only used in conjunction with -L.', - default=False, - action='store_true') - - special_modes_group = parser.add_argument_group( - 'Special modes - do not do any highlighting') - special_modes = special_modes_group.add_mutually_exclusive_group() - special_modes.add_argument( - '-S', metavar='STYLE -f formatter', - help='Print style definitions for STYLE for a formatter ' - 'given with -f. The argument given by -a is formatter ' - 'dependent.') - special_modes.add_argument( - '-L', nargs='*', metavar='WHAT', - help='List lexers, formatters, styles or filters -- ' - 'give additional arguments for the thing(s) you want to list ' - '(e.g. "styles"), or omit them to list everything.') - special_modes.add_argument( - '-N', metavar='FILENAME', - help='Guess and print out a lexer name based solely on the given ' - 'filename. Does not take input or highlight anything. If no specific ' - 'lexer can be determined, "text" is printed.') - special_modes.add_argument( - '-C', action='store_true', - help='Like -N, but print out a lexer name based solely on ' - 'a given content from standard input.') - special_modes.add_argument( - '-H', action='store', nargs=2, metavar=('NAME', 'TYPE'), - help='Print detailed help for the object of type , ' - 'where is one of "lexer", "formatter" or "filter".') - special_modes.add_argument( - '-V', action='store_true', - help='Print the package version.') - special_modes.add_argument( - '-h', '--help', action='store_true', - help='Print this help.') - special_modes_group.add_argument( - '-a', metavar='ARG', - help='Formatter-specific additional argument for the -S (print ' - 'style sheet) mode.') - - argns = parser.parse_args(args[1:]) - - try: - return main_inner(parser, argns) - except BrokenPipeError: - # someone closed our stdout, e.g. 
by quitting a pager. - return 0 - except Exception: - if argns.v: - print(file=sys.stderr) - print('*' * 65, file=sys.stderr) - print('An unhandled exception occurred while highlighting.', - file=sys.stderr) - print('Please report the whole traceback to the issue tracker at', - file=sys.stderr) - print('.', - file=sys.stderr) - print('*' * 65, file=sys.stderr) - print(file=sys.stderr) - raise - import traceback - info = traceback.format_exception(*sys.exc_info()) - msg = info[-1].strip() - if len(info) >= 3: - # extract relevant file and position info - msg += '\n (f%s)' % info[-2].split('\n')[0].strip()[1:] - print(file=sys.stderr) - print('*** Error while highlighting:', file=sys.stderr) - print(msg, file=sys.stderr) - print('*** If this is a bug you want to report, please rerun with -v.', - file=sys.stderr) - return 1 diff --git a/venv/lib/python3.11/site-packages/pygments/console.py b/venv/lib/python3.11/site-packages/pygments/console.py deleted file mode 100644 index deb4937..0000000 --- a/venv/lib/python3.11/site-packages/pygments/console.py +++ /dev/null @@ -1,70 +0,0 @@ -""" - pygments.console - ~~~~~~~~~~~~~~~~ - - Format colored console output. - - :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. 
-""" - -esc = "\x1b[" - -codes = {} -codes[""] = "" -codes["reset"] = esc + "39;49;00m" - -codes["bold"] = esc + "01m" -codes["faint"] = esc + "02m" -codes["standout"] = esc + "03m" -codes["underline"] = esc + "04m" -codes["blink"] = esc + "05m" -codes["overline"] = esc + "06m" - -dark_colors = ["black", "red", "green", "yellow", "blue", - "magenta", "cyan", "gray"] -light_colors = ["brightblack", "brightred", "brightgreen", "brightyellow", "brightblue", - "brightmagenta", "brightcyan", "white"] - -x = 30 -for d, l in zip(dark_colors, light_colors): - codes[d] = esc + "%im" % x - codes[l] = esc + "%im" % (60 + x) - x += 1 - -del d, l, x - -codes["white"] = codes["bold"] - - -def reset_color(): - return codes["reset"] - - -def colorize(color_key, text): - return codes[color_key] + text + codes["reset"] - - -def ansiformat(attr, text): - """ - Format ``text`` with a color and/or some attributes:: - - color normal color - *color* bold color - _color_ underlined color - +color+ blinking color - """ - result = [] - if attr[:1] == attr[-1:] == '+': - result.append(codes['blink']) - attr = attr[1:-1] - if attr[:1] == attr[-1:] == '*': - result.append(codes['bold']) - attr = attr[1:-1] - if attr[:1] == attr[-1:] == '_': - result.append(codes['underline']) - attr = attr[1:-1] - result.append(codes[attr]) - result.append(text) - result.append(codes['reset']) - return ''.join(result) diff --git a/venv/lib/python3.11/site-packages/pygments/filter.py b/venv/lib/python3.11/site-packages/pygments/filter.py deleted file mode 100644 index dafa08d..0000000 --- a/venv/lib/python3.11/site-packages/pygments/filter.py +++ /dev/null @@ -1,71 +0,0 @@ -""" - pygments.filter - ~~~~~~~~~~~~~~~ - - Module that implements the default filter. - - :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - - -def apply_filters(stream, filters, lexer=None): - """ - Use this method to apply an iterable of filters to - a stream. 
If lexer is given it's forwarded to the - filter, otherwise the filter receives `None`. - """ - def _apply(filter_, stream): - yield from filter_.filter(lexer, stream) - for filter_ in filters: - stream = _apply(filter_, stream) - return stream - - -def simplefilter(f): - """ - Decorator that converts a function into a filter:: - - @simplefilter - def lowercase(self, lexer, stream, options): - for ttype, value in stream: - yield ttype, value.lower() - """ - return type(f.__name__, (FunctionFilter,), { - '__module__': getattr(f, '__module__'), - '__doc__': f.__doc__, - 'function': f, - }) - - -class Filter: - """ - Default filter. Subclass this class or use the `simplefilter` - decorator to create own filters. - """ - - def __init__(self, **options): - self.options = options - - def filter(self, lexer, stream): - raise NotImplementedError() - - -class FunctionFilter(Filter): - """ - Abstract class used by `simplefilter` to create simple - function filters on the fly. The `simplefilter` decorator - automatically creates subclasses of this class for - functions passed to it. - """ - function = None - - def __init__(self, **options): - if not hasattr(self, 'function'): - raise TypeError('%r used without bound function' % - self.__class__.__name__) - Filter.__init__(self, **options) - - def filter(self, lexer, stream): - # pylint: disable=not-callable - yield from self.function(lexer, stream, self.options) diff --git a/venv/lib/python3.11/site-packages/pygments/filters/__init__.py b/venv/lib/python3.11/site-packages/pygments/filters/__init__.py deleted file mode 100644 index 8bd5374..0000000 --- a/venv/lib/python3.11/site-packages/pygments/filters/__init__.py +++ /dev/null @@ -1,940 +0,0 @@ -""" - pygments.filters - ~~~~~~~~~~~~~~~~ - - Module containing filter lookup functions and default - filters. - - :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. 
-""" - -import re - -from pygments.token import String, Comment, Keyword, Name, Error, Whitespace, \ - string_to_tokentype -from pygments.filter import Filter -from pygments.util import get_list_opt, get_int_opt, get_bool_opt, \ - get_choice_opt, ClassNotFound, OptionError -from pygments.plugin import find_plugin_filters - - -def find_filter_class(filtername): - """Lookup a filter by name. Return None if not found.""" - if filtername in FILTERS: - return FILTERS[filtername] - for name, cls in find_plugin_filters(): - if name == filtername: - return cls - return None - - -def get_filter_by_name(filtername, **options): - """Return an instantiated filter. - - Options are passed to the filter initializer if wanted. - Raise a ClassNotFound if not found. - """ - cls = find_filter_class(filtername) - if cls: - return cls(**options) - else: - raise ClassNotFound('filter %r not found' % filtername) - - -def get_all_filters(): - """Return a generator of all filter names.""" - yield from FILTERS - for name, _ in find_plugin_filters(): - yield name - - -def _replace_special(ttype, value, regex, specialttype, - replacefunc=lambda x: x): - last = 0 - for match in regex.finditer(value): - start, end = match.start(), match.end() - if start != last: - yield ttype, value[last:start] - yield specialttype, replacefunc(value[start:end]) - last = end - if last != len(value): - yield ttype, value[last:] - - -class CodeTagFilter(Filter): - """Highlight special code tags in comments and docstrings. - - Options accepted: - - `codetags` : list of strings - A list of strings that are flagged as code tags. The default is to - highlight ``XXX``, ``TODO``, ``FIXME``, ``BUG`` and ``NOTE``. - - .. versionchanged:: 2.13 - Now recognizes ``FIXME`` by default. 
- """ - - def __init__(self, **options): - Filter.__init__(self, **options) - tags = get_list_opt(options, 'codetags', - ['XXX', 'TODO', 'FIXME', 'BUG', 'NOTE']) - self.tag_re = re.compile(r'\b(%s)\b' % '|'.join([ - re.escape(tag) for tag in tags if tag - ])) - - def filter(self, lexer, stream): - regex = self.tag_re - for ttype, value in stream: - if ttype in String.Doc or \ - ttype in Comment and \ - ttype not in Comment.Preproc: - yield from _replace_special(ttype, value, regex, Comment.Special) - else: - yield ttype, value - - -class SymbolFilter(Filter): - """Convert mathematical symbols such as \\ in Isabelle - or \\longrightarrow in LaTeX into Unicode characters. - - This is mostly useful for HTML or console output when you want to - approximate the source rendering you'd see in an IDE. - - Options accepted: - - `lang` : string - The symbol language. Must be one of ``'isabelle'`` or - ``'latex'``. The default is ``'isabelle'``. - """ - - latex_symbols = { - '\\alpha' : '\U000003b1', - '\\beta' : '\U000003b2', - '\\gamma' : '\U000003b3', - '\\delta' : '\U000003b4', - '\\varepsilon' : '\U000003b5', - '\\zeta' : '\U000003b6', - '\\eta' : '\U000003b7', - '\\vartheta' : '\U000003b8', - '\\iota' : '\U000003b9', - '\\kappa' : '\U000003ba', - '\\lambda' : '\U000003bb', - '\\mu' : '\U000003bc', - '\\nu' : '\U000003bd', - '\\xi' : '\U000003be', - '\\pi' : '\U000003c0', - '\\varrho' : '\U000003c1', - '\\sigma' : '\U000003c3', - '\\tau' : '\U000003c4', - '\\upsilon' : '\U000003c5', - '\\varphi' : '\U000003c6', - '\\chi' : '\U000003c7', - '\\psi' : '\U000003c8', - '\\omega' : '\U000003c9', - '\\Gamma' : '\U00000393', - '\\Delta' : '\U00000394', - '\\Theta' : '\U00000398', - '\\Lambda' : '\U0000039b', - '\\Xi' : '\U0000039e', - '\\Pi' : '\U000003a0', - '\\Sigma' : '\U000003a3', - '\\Upsilon' : '\U000003a5', - '\\Phi' : '\U000003a6', - '\\Psi' : '\U000003a8', - '\\Omega' : '\U000003a9', - '\\leftarrow' : '\U00002190', - '\\longleftarrow' : '\U000027f5', - '\\rightarrow' : 
'\U00002192', - '\\longrightarrow' : '\U000027f6', - '\\Leftarrow' : '\U000021d0', - '\\Longleftarrow' : '\U000027f8', - '\\Rightarrow' : '\U000021d2', - '\\Longrightarrow' : '\U000027f9', - '\\leftrightarrow' : '\U00002194', - '\\longleftrightarrow' : '\U000027f7', - '\\Leftrightarrow' : '\U000021d4', - '\\Longleftrightarrow' : '\U000027fa', - '\\mapsto' : '\U000021a6', - '\\longmapsto' : '\U000027fc', - '\\relbar' : '\U00002500', - '\\Relbar' : '\U00002550', - '\\hookleftarrow' : '\U000021a9', - '\\hookrightarrow' : '\U000021aa', - '\\leftharpoondown' : '\U000021bd', - '\\rightharpoondown' : '\U000021c1', - '\\leftharpoonup' : '\U000021bc', - '\\rightharpoonup' : '\U000021c0', - '\\rightleftharpoons' : '\U000021cc', - '\\leadsto' : '\U0000219d', - '\\downharpoonleft' : '\U000021c3', - '\\downharpoonright' : '\U000021c2', - '\\upharpoonleft' : '\U000021bf', - '\\upharpoonright' : '\U000021be', - '\\restriction' : '\U000021be', - '\\uparrow' : '\U00002191', - '\\Uparrow' : '\U000021d1', - '\\downarrow' : '\U00002193', - '\\Downarrow' : '\U000021d3', - '\\updownarrow' : '\U00002195', - '\\Updownarrow' : '\U000021d5', - '\\langle' : '\U000027e8', - '\\rangle' : '\U000027e9', - '\\lceil' : '\U00002308', - '\\rceil' : '\U00002309', - '\\lfloor' : '\U0000230a', - '\\rfloor' : '\U0000230b', - '\\flqq' : '\U000000ab', - '\\frqq' : '\U000000bb', - '\\bot' : '\U000022a5', - '\\top' : '\U000022a4', - '\\wedge' : '\U00002227', - '\\bigwedge' : '\U000022c0', - '\\vee' : '\U00002228', - '\\bigvee' : '\U000022c1', - '\\forall' : '\U00002200', - '\\exists' : '\U00002203', - '\\nexists' : '\U00002204', - '\\neg' : '\U000000ac', - '\\Box' : '\U000025a1', - '\\Diamond' : '\U000025c7', - '\\vdash' : '\U000022a2', - '\\models' : '\U000022a8', - '\\dashv' : '\U000022a3', - '\\surd' : '\U0000221a', - '\\le' : '\U00002264', - '\\ge' : '\U00002265', - '\\ll' : '\U0000226a', - '\\gg' : '\U0000226b', - '\\lesssim' : '\U00002272', - '\\gtrsim' : '\U00002273', - '\\lessapprox' : '\U00002a85', 
- '\\gtrapprox' : '\U00002a86', - '\\in' : '\U00002208', - '\\notin' : '\U00002209', - '\\subset' : '\U00002282', - '\\supset' : '\U00002283', - '\\subseteq' : '\U00002286', - '\\supseteq' : '\U00002287', - '\\sqsubset' : '\U0000228f', - '\\sqsupset' : '\U00002290', - '\\sqsubseteq' : '\U00002291', - '\\sqsupseteq' : '\U00002292', - '\\cap' : '\U00002229', - '\\bigcap' : '\U000022c2', - '\\cup' : '\U0000222a', - '\\bigcup' : '\U000022c3', - '\\sqcup' : '\U00002294', - '\\bigsqcup' : '\U00002a06', - '\\sqcap' : '\U00002293', - '\\Bigsqcap' : '\U00002a05', - '\\setminus' : '\U00002216', - '\\propto' : '\U0000221d', - '\\uplus' : '\U0000228e', - '\\bigplus' : '\U00002a04', - '\\sim' : '\U0000223c', - '\\doteq' : '\U00002250', - '\\simeq' : '\U00002243', - '\\approx' : '\U00002248', - '\\asymp' : '\U0000224d', - '\\cong' : '\U00002245', - '\\equiv' : '\U00002261', - '\\Join' : '\U000022c8', - '\\bowtie' : '\U00002a1d', - '\\prec' : '\U0000227a', - '\\succ' : '\U0000227b', - '\\preceq' : '\U0000227c', - '\\succeq' : '\U0000227d', - '\\parallel' : '\U00002225', - '\\mid' : '\U000000a6', - '\\pm' : '\U000000b1', - '\\mp' : '\U00002213', - '\\times' : '\U000000d7', - '\\div' : '\U000000f7', - '\\cdot' : '\U000022c5', - '\\star' : '\U000022c6', - '\\circ' : '\U00002218', - '\\dagger' : '\U00002020', - '\\ddagger' : '\U00002021', - '\\lhd' : '\U000022b2', - '\\rhd' : '\U000022b3', - '\\unlhd' : '\U000022b4', - '\\unrhd' : '\U000022b5', - '\\triangleleft' : '\U000025c3', - '\\triangleright' : '\U000025b9', - '\\triangle' : '\U000025b3', - '\\triangleq' : '\U0000225c', - '\\oplus' : '\U00002295', - '\\bigoplus' : '\U00002a01', - '\\otimes' : '\U00002297', - '\\bigotimes' : '\U00002a02', - '\\odot' : '\U00002299', - '\\bigodot' : '\U00002a00', - '\\ominus' : '\U00002296', - '\\oslash' : '\U00002298', - '\\dots' : '\U00002026', - '\\cdots' : '\U000022ef', - '\\sum' : '\U00002211', - '\\prod' : '\U0000220f', - '\\coprod' : '\U00002210', - '\\infty' : '\U0000221e', - '\\int' : 
'\U0000222b', - '\\oint' : '\U0000222e', - '\\clubsuit' : '\U00002663', - '\\diamondsuit' : '\U00002662', - '\\heartsuit' : '\U00002661', - '\\spadesuit' : '\U00002660', - '\\aleph' : '\U00002135', - '\\emptyset' : '\U00002205', - '\\nabla' : '\U00002207', - '\\partial' : '\U00002202', - '\\flat' : '\U0000266d', - '\\natural' : '\U0000266e', - '\\sharp' : '\U0000266f', - '\\angle' : '\U00002220', - '\\copyright' : '\U000000a9', - '\\textregistered' : '\U000000ae', - '\\textonequarter' : '\U000000bc', - '\\textonehalf' : '\U000000bd', - '\\textthreequarters' : '\U000000be', - '\\textordfeminine' : '\U000000aa', - '\\textordmasculine' : '\U000000ba', - '\\euro' : '\U000020ac', - '\\pounds' : '\U000000a3', - '\\yen' : '\U000000a5', - '\\textcent' : '\U000000a2', - '\\textcurrency' : '\U000000a4', - '\\textdegree' : '\U000000b0', - } - - isabelle_symbols = { - '\\' : '\U0001d7ec', - '\\' : '\U0001d7ed', - '\\' : '\U0001d7ee', - '\\' : '\U0001d7ef', - '\\' : '\U0001d7f0', - '\\' : '\U0001d7f1', - '\\' : '\U0001d7f2', - '\\' : '\U0001d7f3', - '\\' : '\U0001d7f4', - '\\' : '\U0001d7f5', - '\\' : '\U0001d49c', - '\\' : '\U0000212c', - '\\' : '\U0001d49e', - '\\' : '\U0001d49f', - '\\' : '\U00002130', - '\\' : '\U00002131', - '\\' : '\U0001d4a2', - '\\' : '\U0000210b', - '\\' : '\U00002110', - '\\' : '\U0001d4a5', - '\\' : '\U0001d4a6', - '\\' : '\U00002112', - '\\' : '\U00002133', - '\\' : '\U0001d4a9', - '\\' : '\U0001d4aa', - '\\

' : '\U0001d5c9', - '\\' : '\U0001d5ca', - '\\' : '\U0001d5cb', - '\\' : '\U0001d5cc', - '\\' : '\U0001d5cd', - '\\' : '\U0001d5ce', - '\\' : '\U0001d5cf', - '\\' : '\U0001d5d0', - '\\' : '\U0001d5d1', - '\\' : '\U0001d5d2', - '\\' : '\U0001d5d3', - '\\' : '\U0001d504', - '\\' : '\U0001d505', - '\\' : '\U0000212d', - '\\

' : '\U0001d507', - '\\' : '\U0001d508', - '\\' : '\U0001d509', - '\\' : '\U0001d50a', - '\\' : '\U0000210c', - '\\' : '\U00002111', - '\\' : '\U0001d50d', - '\\' : '\U0001d50e', - '\\' : '\U0001d50f', - '\\' : '\U0001d510', - '\\' : '\U0001d511', - '\\' : '\U0001d512', - '\\' : '\U0001d513', - '\\' : '\U0001d514', - '\\' : '\U0000211c', - '\\' : '\U0001d516', - '\\' : '\U0001d517', - '\\' : '\U0001d518', - '\\' : '\U0001d519', - '\\' : '\U0001d51a', - '\\' : '\U0001d51b', - '\\' : '\U0001d51c', - '\\' : '\U00002128', - '\\' : '\U0001d51e', - '\\' : '\U0001d51f', - '\\' : '\U0001d520', - '\\
' : '\U0001d521', - '\\' : '\U0001d522', - '\\' : '\U0001d523', - '\\' : '\U0001d524', - '\\' : '\U0001d525', - '\\' : '\U0001d526', - '\\' : '\U0001d527', - '\\' : '\U0001d528', - '\\' : '\U0001d529', - '\\' : '\U0001d52a', - '\\' : '\U0001d52b', - '\\' : '\U0001d52c', - '\\' : '\U0001d52d', - '\\' : '\U0001d52e', - '\\' : '\U0001d52f', - '\\' : '\U0001d530', - '\\' : '\U0001d531', - '\\' : '\U0001d532', - '\\' : '\U0001d533', - '\\' : '\U0001d534', - '\\' : '\U0001d535', - '\\' : '\U0001d536', - '\\' : '\U0001d537', - '\\' : '\U000003b1', - '\\' : '\U000003b2', - '\\' : '\U000003b3', - '\\' : '\U000003b4', - '\\' : '\U000003b5', - '\\' : '\U000003b6', - '\\' : '\U000003b7', - '\\' : '\U000003b8', - '\\' : '\U000003b9', - '\\' : '\U000003ba', - '\\' : '\U000003bb', - '\\' : '\U000003bc', - '\\' : '\U000003bd', - '\\' : '\U000003be', - '\\' : '\U000003c0', - '\\' : '\U000003c1', - '\\' : '\U000003c3', - '\\' : '\U000003c4', - '\\' : '\U000003c5', - '\\' : '\U000003c6', - '\\' : '\U000003c7', - '\\' : '\U000003c8', - '\\' : '\U000003c9', - '\\' : '\U00000393', - '\\' : '\U00000394', - '\\' : '\U00000398', - '\\' : '\U0000039b', - '\\' : '\U0000039e', - '\\' : '\U000003a0', - '\\' : '\U000003a3', - '\\' : '\U000003a5', - '\\' : '\U000003a6', - '\\' : '\U000003a8', - '\\' : '\U000003a9', - '\\' : '\U0001d539', - '\\' : '\U00002102', - '\\' : '\U00002115', - '\\' : '\U0000211a', - '\\' : '\U0000211d', - '\\' : '\U00002124', - '\\' : '\U00002190', - '\\' : '\U000027f5', - '\\' : '\U00002192', - '\\' : '\U000027f6', - '\\' : '\U000021d0', - '\\' : '\U000027f8', - '\\' : '\U000021d2', - '\\' : '\U000027f9', - '\\' : '\U00002194', - '\\' : '\U000027f7', - '\\' : '\U000021d4', - '\\' : '\U000027fa', - '\\' : '\U000021a6', - '\\' : '\U000027fc', - '\\' : '\U00002500', - '\\' : '\U00002550', - '\\' : '\U000021a9', - '\\' : '\U000021aa', - '\\' : '\U000021bd', - '\\' : '\U000021c1', - '\\' : '\U000021bc', - '\\' : '\U000021c0', - '\\' : '\U000021cc', - '\\' : '\U0000219d', - 
'\\' : '\U000021c3', - '\\' : '\U000021c2', - '\\' : '\U000021bf', - '\\' : '\U000021be', - '\\' : '\U000021be', - '\\' : '\U00002237', - '\\' : '\U00002191', - '\\' : '\U000021d1', - '\\' : '\U00002193', - '\\' : '\U000021d3', - '\\' : '\U00002195', - '\\' : '\U000021d5', - '\\' : '\U000027e8', - '\\' : '\U000027e9', - '\\' : '\U00002308', - '\\' : '\U00002309', - '\\' : '\U0000230a', - '\\' : '\U0000230b', - '\\' : '\U00002987', - '\\' : '\U00002988', - '\\' : '\U000027e6', - '\\' : '\U000027e7', - '\\' : '\U00002983', - '\\' : '\U00002984', - '\\' : '\U000000ab', - '\\' : '\U000000bb', - '\\' : '\U000022a5', - '\\' : '\U000022a4', - '\\' : '\U00002227', - '\\' : '\U000022c0', - '\\' : '\U00002228', - '\\' : '\U000022c1', - '\\' : '\U00002200', - '\\' : '\U00002203', - '\\' : '\U00002204', - '\\' : '\U000000ac', - '\\' : '\U000025a1', - '\\' : '\U000025c7', - '\\' : '\U000022a2', - '\\' : '\U000022a8', - '\\' : '\U000022a9', - '\\' : '\U000022ab', - '\\' : '\U000022a3', - '\\' : '\U0000221a', - '\\' : '\U00002264', - '\\' : '\U00002265', - '\\' : '\U0000226a', - '\\' : '\U0000226b', - '\\' : '\U00002272', - '\\' : '\U00002273', - '\\' : '\U00002a85', - '\\' : '\U00002a86', - '\\' : '\U00002208', - '\\' : '\U00002209', - '\\' : '\U00002282', - '\\' : '\U00002283', - '\\' : '\U00002286', - '\\' : '\U00002287', - '\\' : '\U0000228f', - '\\' : '\U00002290', - '\\' : '\U00002291', - '\\' : '\U00002292', - '\\' : '\U00002229', - '\\' : '\U000022c2', - '\\' : '\U0000222a', - '\\' : '\U000022c3', - '\\' : '\U00002294', - '\\' : '\U00002a06', - '\\' : '\U00002293', - '\\' : '\U00002a05', - '\\' : '\U00002216', - '\\' : '\U0000221d', - '\\' : '\U0000228e', - '\\' : '\U00002a04', - '\\' : '\U00002260', - '\\' : '\U0000223c', - '\\' : '\U00002250', - '\\' : '\U00002243', - '\\' : '\U00002248', - '\\' : '\U0000224d', - '\\' : '\U00002245', - '\\' : '\U00002323', - '\\' : '\U00002261', - '\\' : '\U00002322', - '\\' : '\U000022c8', - '\\' : '\U00002a1d', - '\\' : '\U0000227a', 
- '\\' : '\U0000227b', - '\\' : '\U0000227c', - '\\' : '\U0000227d', - '\\' : '\U00002225', - '\\' : '\U000000a6', - '\\' : '\U000000b1', - '\\' : '\U00002213', - '\\' : '\U000000d7', - '\\
' : '\U000000f7', - '\\' : '\U000022c5', - '\\' : '\U000022c6', - '\\' : '\U00002219', - '\\' : '\U00002218', - '\\' : '\U00002020', - '\\' : '\U00002021', - '\\' : '\U000022b2', - '\\' : '\U000022b3', - '\\' : '\U000022b4', - '\\' : '\U000022b5', - '\\' : '\U000025c3', - '\\' : '\U000025b9', - '\\' : '\U000025b3', - '\\' : '\U0000225c', - '\\' : '\U00002295', - '\\' : '\U00002a01', - '\\' : '\U00002297', - '\\' : '\U00002a02', - '\\' : '\U00002299', - '\\' : '\U00002a00', - '\\' : '\U00002296', - '\\' : '\U00002298', - '\\' : '\U00002026', - '\\' : '\U000022ef', - '\\' : '\U00002211', - '\\' : '\U0000220f', - '\\' : '\U00002210', - '\\' : '\U0000221e', - '\\' : '\U0000222b', - '\\' : '\U0000222e', - '\\' : '\U00002663', - '\\' : '\U00002662', - '\\' : '\U00002661', - '\\' : '\U00002660', - '\\' : '\U00002135', - '\\' : '\U00002205', - '\\' : '\U00002207', - '\\' : '\U00002202', - '\\' : '\U0000266d', - '\\' : '\U0000266e', - '\\' : '\U0000266f', - '\\' : '\U00002220', - '\\' : '\U000000a9', - '\\' : '\U000000ae', - '\\' : '\U000000ad', - '\\' : '\U000000af', - '\\' : '\U000000bc', - '\\' : '\U000000bd', - '\\' : '\U000000be', - '\\' : '\U000000aa', - '\\' : '\U000000ba', - '\\
' : '\U000000a7', - '\\' : '\U000000b6', - '\\' : '\U000000a1', - '\\' : '\U000000bf', - '\\' : '\U000020ac', - '\\' : '\U000000a3', - '\\' : '\U000000a5', - '\\' : '\U000000a2', - '\\' : '\U000000a4', - '\\' : '\U000000b0', - '\\' : '\U00002a3f', - '\\' : '\U00002127', - '\\' : '\U000025ca', - '\\' : '\U00002118', - '\\' : '\U00002240', - '\\' : '\U000022c4', - '\\' : '\U000000b4', - '\\' : '\U00000131', - '\\' : '\U000000a8', - '\\' : '\U000000b8', - '\\' : '\U000002dd', - '\\' : '\U000003f5', - '\\' : '\U000023ce', - '\\' : '\U00002039', - '\\' : '\U0000203a', - '\\' : '\U00002302', - '\\<^sub>' : '\U000021e9', - '\\<^sup>' : '\U000021e7', - '\\<^bold>' : '\U00002759', - '\\<^bsub>' : '\U000021d8', - '\\<^esub>' : '\U000021d9', - '\\<^bsup>' : '\U000021d7', - '\\<^esup>' : '\U000021d6', - } - - lang_map = {'isabelle' : isabelle_symbols, 'latex' : latex_symbols} - - def __init__(self, **options): - Filter.__init__(self, **options) - lang = get_choice_opt(options, 'lang', - ['isabelle', 'latex'], 'isabelle') - self.symbols = self.lang_map[lang] - - def filter(self, lexer, stream): - for ttype, value in stream: - if value in self.symbols: - yield ttype, self.symbols[value] - else: - yield ttype, value - - -class KeywordCaseFilter(Filter): - """Convert keywords to lowercase or uppercase or capitalize them, which - means first letter uppercase, rest lowercase. - - This can be useful e.g. if you highlight Pascal code and want to adapt the - code to your styleguide. - - Options accepted: - - `case` : string - The casing to convert keywords to. Must be one of ``'lower'``, - ``'upper'`` or ``'capitalize'``. The default is ``'lower'``. 
- """ - - def __init__(self, **options): - Filter.__init__(self, **options) - case = get_choice_opt(options, 'case', - ['lower', 'upper', 'capitalize'], 'lower') - self.convert = getattr(str, case) - - def filter(self, lexer, stream): - for ttype, value in stream: - if ttype in Keyword: - yield ttype, self.convert(value) - else: - yield ttype, value - - -class NameHighlightFilter(Filter): - """Highlight a normal Name (and Name.*) token with a different token type. - - Example:: - - filter = NameHighlightFilter( - names=['foo', 'bar', 'baz'], - tokentype=Name.Function, - ) - - This would highlight the names "foo", "bar" and "baz" - as functions. `Name.Function` is the default token type. - - Options accepted: - - `names` : list of strings - A list of names that should be given the different token type. - There is no default. - `tokentype` : TokenType or string - A token type or a string containing a token type name that is - used for highlighting the strings in `names`. The default is - `Name.Function`. - """ - - def __init__(self, **options): - Filter.__init__(self, **options) - self.names = set(get_list_opt(options, 'names', [])) - tokentype = options.get('tokentype') - if tokentype: - self.tokentype = string_to_tokentype(tokentype) - else: - self.tokentype = Name.Function - - def filter(self, lexer, stream): - for ttype, value in stream: - if ttype in Name and value in self.names: - yield self.tokentype, value - else: - yield ttype, value - - -class ErrorToken(Exception): - pass - - -class RaiseOnErrorTokenFilter(Filter): - """Raise an exception when the lexer generates an error token. - - Options accepted: - - `excclass` : Exception class - The exception class to raise. - The default is `pygments.filters.ErrorToken`. - - .. 
versionadded:: 0.8 - """ - - def __init__(self, **options): - Filter.__init__(self, **options) - self.exception = options.get('excclass', ErrorToken) - try: - # issubclass() will raise TypeError if first argument is not a class - if not issubclass(self.exception, Exception): - raise TypeError - except TypeError: - raise OptionError('excclass option is not an exception class') - - def filter(self, lexer, stream): - for ttype, value in stream: - if ttype is Error: - raise self.exception(value) - yield ttype, value - - -class VisibleWhitespaceFilter(Filter): - """Convert tabs, newlines and/or spaces to visible characters. - - Options accepted: - - `spaces` : string or bool - If this is a one-character string, spaces will be replaces by this string. - If it is another true value, spaces will be replaced by ``·`` (unicode - MIDDLE DOT). If it is a false value, spaces will not be replaced. The - default is ``False``. - `tabs` : string or bool - The same as for `spaces`, but the default replacement character is ``»`` - (unicode RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK). The default value - is ``False``. Note: this will not work if the `tabsize` option for the - lexer is nonzero, as tabs will already have been expanded then. - `tabsize` : int - If tabs are to be replaced by this filter (see the `tabs` option), this - is the total number of characters that a tab should be expanded to. - The default is ``8``. - `newlines` : string or bool - The same as for `spaces`, but the default replacement character is ``¶`` - (unicode PILCROW SIGN). The default value is ``False``. - `wstokentype` : bool - If true, give whitespace the special `Whitespace` token type. This allows - styling the visible whitespace differently (e.g. greyed out), but it can - disrupt background colors. The default is ``True``. - - .. 
versionadded:: 0.8 - """ - - def __init__(self, **options): - Filter.__init__(self, **options) - for name, default in [('spaces', '·'), - ('tabs', '»'), - ('newlines', '¶')]: - opt = options.get(name, False) - if isinstance(opt, str) and len(opt) == 1: - setattr(self, name, opt) - else: - setattr(self, name, (opt and default or '')) - tabsize = get_int_opt(options, 'tabsize', 8) - if self.tabs: - self.tabs += ' ' * (tabsize - 1) - if self.newlines: - self.newlines += '\n' - self.wstt = get_bool_opt(options, 'wstokentype', True) - - def filter(self, lexer, stream): - if self.wstt: - spaces = self.spaces or ' ' - tabs = self.tabs or '\t' - newlines = self.newlines or '\n' - regex = re.compile(r'\s') - - def replacefunc(wschar): - if wschar == ' ': - return spaces - elif wschar == '\t': - return tabs - elif wschar == '\n': - return newlines - return wschar - - for ttype, value in stream: - yield from _replace_special(ttype, value, regex, Whitespace, - replacefunc) - else: - spaces, tabs, newlines = self.spaces, self.tabs, self.newlines - # simpler processing - for ttype, value in stream: - if spaces: - value = value.replace(' ', spaces) - if tabs: - value = value.replace('\t', tabs) - if newlines: - value = value.replace('\n', newlines) - yield ttype, value - - -class GobbleFilter(Filter): - """Gobbles source code lines (eats initial characters). - - This filter drops the first ``n`` characters off every line of code. This - may be useful when the source code fed to the lexer is indented by a fixed - amount of space that isn't desired in the output. - - Options accepted: - - `n` : int - The number of characters to gobble. - - .. 
versionadded:: 1.2 - """ - def __init__(self, **options): - Filter.__init__(self, **options) - self.n = get_int_opt(options, 'n', 0) - - def gobble(self, value, left): - if left < len(value): - return value[left:], 0 - else: - return '', left - len(value) - - def filter(self, lexer, stream): - n = self.n - left = n # How many characters left to gobble. - for ttype, value in stream: - # Remove ``left`` tokens from first line, ``n`` from all others. - parts = value.split('\n') - (parts[0], left) = self.gobble(parts[0], left) - for i in range(1, len(parts)): - (parts[i], left) = self.gobble(parts[i], n) - value = '\n'.join(parts) - - if value != '': - yield ttype, value - - -class TokenMergeFilter(Filter): - """Merges consecutive tokens with the same token type in the output - stream of a lexer. - - .. versionadded:: 1.2 - """ - def __init__(self, **options): - Filter.__init__(self, **options) - - def filter(self, lexer, stream): - current_type = None - current_value = None - for ttype, value in stream: - if ttype is current_type: - current_value += value - else: - if current_type is not None: - yield current_type, current_value - current_type = ttype - current_value = value - if current_type is not None: - yield current_type, current_value - - -FILTERS = { - 'codetagify': CodeTagFilter, - 'keywordcase': KeywordCaseFilter, - 'highlight': NameHighlightFilter, - 'raiseonerror': RaiseOnErrorTokenFilter, - 'whitespace': VisibleWhitespaceFilter, - 'gobble': GobbleFilter, - 'tokenmerge': TokenMergeFilter, - 'symbols': SymbolFilter, -} diff --git a/venv/lib/python3.11/site-packages/pygments/filters/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/filters/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index ee230d5..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/filters/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/formatter.py 
b/venv/lib/python3.11/site-packages/pygments/formatter.py deleted file mode 100644 index 87183ab..0000000 --- a/venv/lib/python3.11/site-packages/pygments/formatter.py +++ /dev/null @@ -1,124 +0,0 @@ -""" - pygments.formatter - ~~~~~~~~~~~~~~~~~~ - - Base formatter class. - - :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -import codecs - -from pygments.util import get_bool_opt -from pygments.styles import get_style_by_name - -__all__ = ['Formatter'] - - -def _lookup_style(style): - if isinstance(style, str): - return get_style_by_name(style) - return style - - -class Formatter: - """ - Converts a token stream to text. - - Formatters should have attributes to help selecting them. These - are similar to the corresponding :class:`~pygments.lexer.Lexer` - attributes. - - .. autoattribute:: name - :no-value: - - .. autoattribute:: aliases - :no-value: - - .. autoattribute:: filenames - :no-value: - - You can pass options as keyword arguments to the constructor. - All formatters accept these basic options: - - ``style`` - The style to use, can be a string or a Style subclass - (default: "default"). Not used by e.g. the - TerminalFormatter. - ``full`` - Tells the formatter to output a "full" document, i.e. - a complete self-contained document. This doesn't have - any effect for some formatters (default: false). - ``title`` - If ``full`` is true, the title that should be used to - caption the document (default: ''). - ``encoding`` - If given, must be an encoding name. This will be used to - convert the Unicode token strings to byte strings in the - output. If it is "" or None, Unicode strings will be written - to the output file, which most file-like objects do not - support (default: None). - ``outencoding`` - Overrides ``encoding`` if given. - - """ - - #: Full name for the formatter, in human-readable form. 
- name = None - - #: A list of short, unique identifiers that can be used to lookup - #: the formatter from a list, e.g. using :func:`.get_formatter_by_name()`. - aliases = [] - - #: A list of fnmatch patterns that match filenames for which this - #: formatter can produce output. The patterns in this list should be unique - #: among all formatters. - filenames = [] - - #: If True, this formatter outputs Unicode strings when no encoding - #: option is given. - unicodeoutput = True - - def __init__(self, **options): - """ - As with lexers, this constructor takes arbitrary optional arguments, - and if you override it, you should first process your own options, then - call the base class implementation. - """ - self.style = _lookup_style(options.get('style', 'default')) - self.full = get_bool_opt(options, 'full', False) - self.title = options.get('title', '') - self.encoding = options.get('encoding', None) or None - if self.encoding in ('guess', 'chardet'): - # can happen for e.g. pygmentize -O encoding=guess - self.encoding = 'utf-8' - self.encoding = options.get('outencoding') or self.encoding - self.options = options - - def get_style_defs(self, arg=''): - """ - This method must return statements or declarations suitable to define - the current style for subsequent highlighted text (e.g. CSS classes - in the `HTMLFormatter`). - - The optional argument `arg` can be used to modify the generation and - is formatter dependent (it is standardized because it can be given on - the command line). - - This method is called by the ``-S`` :doc:`command-line option `, - the `arg` is then given by the ``-a`` option. - """ - return '' - - def format(self, tokensource, outfile): - """ - This method must format the tokens from the `tokensource` iterable and - write the formatted version to the file object `outfile`. - - Formatter options can control how exactly the tokens are converted. 
- """ - if self.encoding: - # wrap the outfile in a StreamWriter - outfile = codecs.lookup(self.encoding)[3](outfile) - return self.format_unencoded(tokensource, outfile) diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__init__.py b/venv/lib/python3.11/site-packages/pygments/formatters/__init__.py deleted file mode 100644 index 6e482a1..0000000 --- a/venv/lib/python3.11/site-packages/pygments/formatters/__init__.py +++ /dev/null @@ -1,158 +0,0 @@ -""" - pygments.formatters - ~~~~~~~~~~~~~~~~~~~ - - Pygments formatters. - - :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -import re -import sys -import types -import fnmatch -from os.path import basename - -from pygments.formatters._mapping import FORMATTERS -from pygments.plugin import find_plugin_formatters -from pygments.util import ClassNotFound - -__all__ = ['get_formatter_by_name', 'get_formatter_for_filename', - 'get_all_formatters', 'load_formatter_from_file'] + list(FORMATTERS) - -_formatter_cache = {} # classes by name -_pattern_cache = {} - - -def _fn_matches(fn, glob): - """Return whether the supplied file name fn matches pattern filename.""" - if glob not in _pattern_cache: - pattern = _pattern_cache[glob] = re.compile(fnmatch.translate(glob)) - return pattern.match(fn) - return _pattern_cache[glob].match(fn) - - -def _load_formatters(module_name): - """Load a formatter (and all others in the module too).""" - mod = __import__(module_name, None, None, ['__all__']) - for formatter_name in mod.__all__: - cls = getattr(mod, formatter_name) - _formatter_cache[cls.name] = cls - - -def get_all_formatters(): - """Return a generator for all formatter classes.""" - # NB: this returns formatter classes, not info like get_all_lexers(). 
- for info in FORMATTERS.values(): - if info[1] not in _formatter_cache: - _load_formatters(info[0]) - yield _formatter_cache[info[1]] - for _, formatter in find_plugin_formatters(): - yield formatter - - -def find_formatter_class(alias): - """Lookup a formatter by alias. - - Returns None if not found. - """ - for module_name, name, aliases, _, _ in FORMATTERS.values(): - if alias in aliases: - if name not in _formatter_cache: - _load_formatters(module_name) - return _formatter_cache[name] - for _, cls in find_plugin_formatters(): - if alias in cls.aliases: - return cls - - -def get_formatter_by_name(_alias, **options): - """ - Return an instance of a :class:`.Formatter` subclass that has `alias` in its - aliases list. The formatter is given the `options` at its instantiation. - - Will raise :exc:`pygments.util.ClassNotFound` if no formatter with that - alias is found. - """ - cls = find_formatter_class(_alias) - if cls is None: - raise ClassNotFound("no formatter found for name %r" % _alias) - return cls(**options) - - -def load_formatter_from_file(filename, formattername="CustomFormatter", **options): - """ - Return a `Formatter` subclass instance loaded from the provided file, relative - to the current directory. - - The file is expected to contain a Formatter class named ``formattername`` - (by default, CustomFormatter). Users should be very careful with the input, because - this method is equivalent to running ``eval()`` on the input file. The formatter is - given the `options` at its instantiation. - - :exc:`pygments.util.ClassNotFound` is raised if there are any errors loading - the formatter. - - .. 
versionadded:: 2.2 - """ - try: - # This empty dict will contain the namespace for the exec'd file - custom_namespace = {} - with open(filename, 'rb') as f: - exec(f.read(), custom_namespace) - # Retrieve the class `formattername` from that namespace - if formattername not in custom_namespace: - raise ClassNotFound('no valid %s class found in %s' % - (formattername, filename)) - formatter_class = custom_namespace[formattername] - # And finally instantiate it with the options - return formatter_class(**options) - except OSError as err: - raise ClassNotFound('cannot read %s: %s' % (filename, err)) - except ClassNotFound: - raise - except Exception as err: - raise ClassNotFound('error when loading custom formatter: %s' % err) - - -def get_formatter_for_filename(fn, **options): - """ - Return a :class:`.Formatter` subclass instance that has a filename pattern - matching `fn`. The formatter is given the `options` at its instantiation. - - Will raise :exc:`pygments.util.ClassNotFound` if no formatter for that filename - is found. 
- """ - fn = basename(fn) - for modname, name, _, filenames, _ in FORMATTERS.values(): - for filename in filenames: - if _fn_matches(fn, filename): - if name not in _formatter_cache: - _load_formatters(modname) - return _formatter_cache[name](**options) - for _name, cls in find_plugin_formatters(): - for filename in cls.filenames: - if _fn_matches(fn, filename): - return cls(**options) - raise ClassNotFound("no formatter found for file name %r" % fn) - - -class _automodule(types.ModuleType): - """Automatically import formatters.""" - - def __getattr__(self, name): - info = FORMATTERS.get(name) - if info: - _load_formatters(info[0]) - cls = _formatter_cache[info[1]] - setattr(self, name, cls) - return cls - raise AttributeError(name) - - -oldmod = sys.modules[__name__] -newmod = _automodule(__name__) -newmod.__dict__.update(oldmod.__dict__) -sys.modules[__name__] = newmod -del newmod.newmod, newmod.oldmod, newmod.sys, newmod.types diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/__init__.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/__init__.cpython-311.pyc deleted file mode 100644 index de3734e..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/__init__.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/_mapping.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/_mapping.cpython-311.pyc deleted file mode 100644 index 268b46e..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/_mapping.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/bbcode.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/bbcode.cpython-311.pyc deleted file mode 100644 index f958bd3..0000000 Binary files 
a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/bbcode.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/groff.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/groff.cpython-311.pyc deleted file mode 100644 index d578aa0..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/groff.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/html.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/html.cpython-311.pyc deleted file mode 100644 index 42eb8bd..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/html.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/img.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/img.cpython-311.pyc deleted file mode 100644 index 60484d1..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/img.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/irc.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/irc.cpython-311.pyc deleted file mode 100644 index 022a40e..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/irc.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/latex.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/latex.cpython-311.pyc deleted file mode 100644 index 2bbb094..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/latex.cpython-311.pyc and /dev/null differ diff --git 
a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/other.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/other.cpython-311.pyc deleted file mode 100644 index 6bc4288..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/other.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/pangomarkup.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/pangomarkup.cpython-311.pyc deleted file mode 100644 index 8db5e7e..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/pangomarkup.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/rtf.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/rtf.cpython-311.pyc deleted file mode 100644 index 173c930..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/rtf.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/svg.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/svg.cpython-311.pyc deleted file mode 100644 index d579678..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/svg.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/terminal.cpython-311.pyc b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/terminal.cpython-311.pyc deleted file mode 100644 index e46767f..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/terminal.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/terminal256.cpython-311.pyc 
b/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/terminal256.cpython-311.pyc deleted file mode 100644 index 560606f..0000000 Binary files a/venv/lib/python3.11/site-packages/pygments/formatters/__pycache__/terminal256.cpython-311.pyc and /dev/null differ diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/_mapping.py b/venv/lib/python3.11/site-packages/pygments/formatters/_mapping.py deleted file mode 100755 index 72ca840..0000000 --- a/venv/lib/python3.11/site-packages/pygments/formatters/_mapping.py +++ /dev/null @@ -1,23 +0,0 @@ -# Automatically generated by scripts/gen_mapfiles.py. -# DO NOT EDIT BY HAND; run `tox -e mapfiles` instead. - -FORMATTERS = { - 'BBCodeFormatter': ('pygments.formatters.bbcode', 'BBCode', ('bbcode', 'bb'), (), 'Format tokens with BBcodes. These formatting codes are used by many bulletin boards, so you can highlight your sourcecode with pygments before posting it there.'), - 'BmpImageFormatter': ('pygments.formatters.img', 'img_bmp', ('bmp', 'bitmap'), ('*.bmp',), 'Create a bitmap image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'), - 'GifImageFormatter': ('pygments.formatters.img', 'img_gif', ('gif',), ('*.gif',), 'Create a GIF image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'), - 'GroffFormatter': ('pygments.formatters.groff', 'groff', ('groff', 'troff', 'roff'), (), 'Format tokens with groff escapes to change their color and font style.'), - 'HtmlFormatter': ('pygments.formatters.html', 'HTML', ('html',), ('*.html', '*.htm'), "Format tokens as HTML 4 ```` tags. By default, the content is enclosed in a ``
`` tag, itself wrapped in a ``
`` tag (but see the `nowrap` option). The ``
``'s CSS class can be set by the `cssclass` option."), - 'IRCFormatter': ('pygments.formatters.irc', 'IRC', ('irc', 'IRC'), (), 'Format tokens with IRC color sequences'), - 'ImageFormatter': ('pygments.formatters.img', 'img', ('img', 'IMG', 'png'), ('*.png',), 'Create a PNG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'), - 'JpgImageFormatter': ('pygments.formatters.img', 'img_jpg', ('jpg', 'jpeg'), ('*.jpg',), 'Create a JPEG image from source code. This uses the Python Imaging Library to generate a pixmap from the source code.'), - 'LatexFormatter': ('pygments.formatters.latex', 'LaTeX', ('latex', 'tex'), ('*.tex',), 'Format tokens as LaTeX code. This needs the `fancyvrb` and `color` standard packages.'), - 'NullFormatter': ('pygments.formatters.other', 'Text only', ('text', 'null'), ('*.txt',), 'Output the text unchanged without any formatting.'), - 'PangoMarkupFormatter': ('pygments.formatters.pangomarkup', 'Pango Markup', ('pango', 'pangomarkup'), (), 'Format tokens as Pango Markup code. It can then be rendered to an SVG.'), - 'RawTokenFormatter': ('pygments.formatters.other', 'Raw tokens', ('raw', 'tokens'), ('*.raw',), 'Format tokens as a raw representation for storing token streams.'), - 'RtfFormatter': ('pygments.formatters.rtf', 'RTF', ('rtf',), ('*.rtf',), 'Format tokens as RTF markup. This formatter automatically outputs full RTF documents with color information and other useful stuff. Perfect for Copy and Paste into Microsoft(R) Word(R) documents.'), - 'SvgFormatter': ('pygments.formatters.svg', 'SVG', ('svg',), ('*.svg',), 'Format tokens as an SVG graphics file. This formatter is still experimental. 
Each line of code is a ```` element with explicit ``x`` and ``y`` coordinates containing ```` elements with the individual token styles.'), - 'Terminal256Formatter': ('pygments.formatters.terminal256', 'Terminal256', ('terminal256', 'console256', '256'), (), 'Format tokens with ANSI color sequences, for output in a 256-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'), - 'TerminalFormatter': ('pygments.formatters.terminal', 'Terminal', ('terminal', 'console'), (), 'Format tokens with ANSI color sequences, for output in a text console. Color sequences are terminated at newlines, so that paging the output works correctly.'), - 'TerminalTrueColorFormatter': ('pygments.formatters.terminal256', 'TerminalTrueColor', ('terminal16m', 'console16m', '16m'), (), 'Format tokens with ANSI color sequences, for output in a true-color terminal or console. Like in `TerminalFormatter` color sequences are terminated at newlines, so that paging the output works correctly.'), - 'TestcaseFormatter': ('pygments.formatters.other', 'Testcase', ('testcase',), (), 'Format tokens as appropriate for a new testcase.'), -} diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/bbcode.py b/venv/lib/python3.11/site-packages/pygments/formatters/bbcode.py deleted file mode 100644 index 9ce4ebc..0000000 --- a/venv/lib/python3.11/site-packages/pygments/formatters/bbcode.py +++ /dev/null @@ -1,108 +0,0 @@ -""" - pygments.formatters.bbcode - ~~~~~~~~~~~~~~~~~~~~~~~~~~ - - BBcode formatter. - - :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - - -from pygments.formatter import Formatter -from pygments.util import get_bool_opt - -__all__ = ['BBCodeFormatter'] - - -class BBCodeFormatter(Formatter): - """ - Format tokens with BBcodes. 
These formatting codes are used by many - bulletin boards, so you can highlight your sourcecode with pygments before - posting it there. - - This formatter has no support for background colors and borders, as there - are no common BBcode tags for that. - - Some board systems (e.g. phpBB) don't support colors in their [code] tag, - so you can't use the highlighting together with that tag. - Text in a [code] tag usually is shown with a monospace font (which this - formatter can do with the ``monofont`` option) and no spaces (which you - need for indentation) are removed. - - Additional options accepted: - - `style` - The style to use, can be a string or a Style subclass (default: - ``'default'``). - - `codetag` - If set to true, put the output into ``[code]`` tags (default: - ``false``) - - `monofont` - If set to true, add a tag to show the code with a monospace font - (default: ``false``). - """ - name = 'BBCode' - aliases = ['bbcode', 'bb'] - filenames = [] - - def __init__(self, **options): - Formatter.__init__(self, **options) - self._code = get_bool_opt(options, 'codetag', False) - self._mono = get_bool_opt(options, 'monofont', False) - - self.styles = {} - self._make_styles() - - def _make_styles(self): - for ttype, ndef in self.style: - start = end = '' - if ndef['color']: - start += '[color=#%s]' % ndef['color'] - end = '[/color]' + end - if ndef['bold']: - start += '[b]' - end = '[/b]' + end - if ndef['italic']: - start += '[i]' - end = '[/i]' + end - if ndef['underline']: - start += '[u]' - end = '[/u]' + end - # there are no common BBcodes for background-color and border - - self.styles[ttype] = start, end - - def format_unencoded(self, tokensource, outfile): - if self._code: - outfile.write('[code]') - if self._mono: - outfile.write('[font=monospace]') - - lastval = '' - lasttype = None - - for ttype, value in tokensource: - while ttype not in self.styles: - ttype = ttype.parent - if ttype == lasttype: - lastval += value - else: - if lastval: - start, end 
= self.styles[lasttype] - outfile.write(''.join((start, lastval, end))) - lastval = value - lasttype = ttype - - if lastval: - start, end = self.styles[lasttype] - outfile.write(''.join((start, lastval, end))) - - if self._mono: - outfile.write('[/font]') - if self._code: - outfile.write('[/code]') - if self._code or self._mono: - outfile.write('\n') diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/groff.py b/venv/lib/python3.11/site-packages/pygments/formatters/groff.py deleted file mode 100644 index 687fd54..0000000 --- a/venv/lib/python3.11/site-packages/pygments/formatters/groff.py +++ /dev/null @@ -1,170 +0,0 @@ -""" - pygments.formatters.groff - ~~~~~~~~~~~~~~~~~~~~~~~~~ - - Formatter for groff output. - - :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. -""" - -import math -from pygments.formatter import Formatter -from pygments.util import get_bool_opt, get_int_opt - -__all__ = ['GroffFormatter'] - - -class GroffFormatter(Formatter): - """ - Format tokens with groff escapes to change their color and font style. - - .. versionadded:: 2.11 - - Additional options accepted: - - `style` - The style to use, can be a string or a Style subclass (default: - ``'default'``). - - `monospaced` - If set to true, monospace font will be used (default: ``true``). - - `linenos` - If set to true, print the line numbers (default: ``false``). - - `wrap` - Wrap lines to the specified number of characters. Disabled if set to 0 - (default: ``0``). 
- """ - - name = 'groff' - aliases = ['groff','troff','roff'] - filenames = [] - - def __init__(self, **options): - Formatter.__init__(self, **options) - - self.monospaced = get_bool_opt(options, 'monospaced', True) - self.linenos = get_bool_opt(options, 'linenos', False) - self._lineno = 0 - self.wrap = get_int_opt(options, 'wrap', 0) - self._linelen = 0 - - self.styles = {} - self._make_styles() - - - def _make_styles(self): - regular = '\\f[CR]' if self.monospaced else '\\f[R]' - bold = '\\f[CB]' if self.monospaced else '\\f[B]' - italic = '\\f[CI]' if self.monospaced else '\\f[I]' - - for ttype, ndef in self.style: - start = end = '' - if ndef['color']: - start += '\\m[%s]' % ndef['color'] - end = '\\m[]' + end - if ndef['bold']: - start += bold - end = regular + end - if ndef['italic']: - start += italic - end = regular + end - if ndef['bgcolor']: - start += '\\M[%s]' % ndef['bgcolor'] - end = '\\M[]' + end - - self.styles[ttype] = start, end - - - def _define_colors(self, outfile): - colors = set() - for _, ndef in self.style: - if ndef['color'] is not None: - colors.add(ndef['color']) - - for color in sorted(colors): - outfile.write('.defcolor ' + color + ' rgb #' + color + '\n') - - - def _write_lineno(self, outfile): - self._lineno += 1 - outfile.write("%s% 4d " % (self._lineno != 1 and '\n' or '', self._lineno)) - - - def _wrap_line(self, line): - length = len(line.rstrip('\n')) - space = ' ' if self.linenos else '' - newline = '' - - if length > self.wrap: - for i in range(0, math.floor(length / self.wrap)): - chunk = line[i*self.wrap:i*self.wrap+self.wrap] - newline += (chunk + '\n' + space) - remainder = length % self.wrap - if remainder > 0: - newline += line[-remainder-1:] - self._linelen = remainder - elif self._linelen + length > self.wrap: - newline = ('\n' + space) + line - self._linelen = length - else: - newline = line - self._linelen += length - - return newline - - - def _escape_chars(self, text): - text = text.replace('\\', '\\[u005C]'). 
\ - replace('.', '\\[char46]'). \ - replace('\'', '\\[u0027]'). \ - replace('`', '\\[u0060]'). \ - replace('~', '\\[u007E]') - copy = text - - for char in copy: - if len(char) != len(char.encode()): - uni = char.encode('unicode_escape') \ - .decode()[1:] \ - .replace('x', 'u00') \ - .upper() - text = text.replace(char, '\\[u' + uni[1:] + ']') - - return text - - - def format_unencoded(self, tokensource, outfile): - self._define_colors(outfile) - - outfile.write('.nf\n\\f[CR]\n') - - if self.linenos: - self._write_lineno(outfile) - - for ttype, value in tokensource: - while ttype not in self.styles: - ttype = ttype.parent - start, end = self.styles[ttype] - - for line in value.splitlines(True): - if self.wrap > 0: - line = self._wrap_line(line) - - if start and end: - text = self._escape_chars(line.rstrip('\n')) - if text != '': - outfile.write(''.join((start, text, end))) - else: - outfile.write(self._escape_chars(line.rstrip('\n'))) - - if line.endswith('\n'): - if self.linenos: - self._write_lineno(outfile) - self._linelen = 0 - else: - outfile.write('\n') - self._linelen = 0 - - outfile.write('\n.fi') diff --git a/venv/lib/python3.11/site-packages/pygments/formatters/html.py b/venv/lib/python3.11/site-packages/pygments/formatters/html.py deleted file mode 100644 index df2469e..0000000 --- a/venv/lib/python3.11/site-packages/pygments/formatters/html.py +++ /dev/null @@ -1,990 +0,0 @@ -""" - pygments.formatters.html - ~~~~~~~~~~~~~~~~~~~~~~~~ - - Formatter for HTML output. - - :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. 
-""" - -import functools -import os -import sys -import os.path -from io import StringIO - -from pygments.formatter import Formatter -from pygments.token import Token, Text, STANDARD_TYPES -from pygments.util import get_bool_opt, get_int_opt, get_list_opt - -try: - import ctags -except ImportError: - ctags = None - -__all__ = ['HtmlFormatter'] - - -_escape_html_table = { - ord('&'): '&', - ord('<'): '<', - ord('>'): '>', - ord('"'): '"', - ord("'"): ''', -} - - -def escape_html(text, table=_escape_html_table): - """Escape &, <, > as well as single and double quotes for HTML.""" - return text.translate(table) - - -def webify(color): - if color.startswith('calc') or color.startswith('var'): - return color - else: - return '#' + color - - -def _get_ttype_class(ttype): - fname = STANDARD_TYPES.get(ttype) - if fname: - return fname - aname = '' - while fname is None: - aname = '-' + ttype[-1] + aname - ttype = ttype.parent - fname = STANDARD_TYPES.get(ttype) - return fname + aname - - -CSSFILE_TEMPLATE = '''\ -/* -generated by Pygments -Copyright 2006-2023 by the Pygments team. -Licensed under the BSD license, see LICENSE for details. -*/ -%(styledefs)s -''' - -DOC_HEADER = '''\ - - - - - %(title)s - - - - -

%(title)s

- -''' - -DOC_HEADER_EXTERNALCSS = '''\ - - - - - %(title)s - - - - -

%(title)s

- -''' - -DOC_FOOTER = '''\ - - -''' - - -class HtmlFormatter(Formatter): - r""" - Format tokens as HTML 4 ```` tags. By default, the content is enclosed - in a ``
`` tag, itself wrapped in a ``
`` tag (but see the `nowrap` option). - The ``
``'s CSS class can be set by the `cssclass` option. - - If the `linenos` option is set to ``"table"``, the ``
`` is
-    additionally wrapped inside a ```` which has one row and two
-    cells: one containing the line numbers and one containing the code.
-    Example:
-
-    .. sourcecode:: html
-
-        
-
- - -
-
1
-            2
-
-
def foo(bar):
-              pass
-            
-
- - (whitespace added to improve clarity). - - A list of lines can be specified using the `hl_lines` option to make these - lines highlighted (as of Pygments 0.11). - - With the `full` option, a complete HTML 4 document is output, including - the style definitions inside a ``$)', _handle_cssblock), - - include('keywords'), - include('inline'), - ], - 'keywords': [ - (words(( - '\\define', '\\end', 'caption', 'created', 'modified', 'tags', - 'title', 'type'), prefix=r'^', suffix=r'\b'), - Keyword), - ], - 'inline': [ - # escape - (r'\\.', Text), - # created or modified date - (r'\d{17}', Number.Integer), - # italics - (r'(\s)(//[^/]+//)((?=\W|\n))', - bygroups(Text, Generic.Emph, Text)), - # superscript - (r'(\s)(\^\^[^\^]+\^\^)', bygroups(Text, Generic.Emph)), - # subscript - (r'(\s)(,,[^,]+,,)', bygroups(Text, Generic.Emph)), - # underscore - (r'(\s)(__[^_]+__)', bygroups(Text, Generic.Strong)), - # bold - (r"(\s)(''[^']+'')((?=\W|\n))", - bygroups(Text, Generic.Strong, Text)), - # strikethrough - (r'(\s)(~~[^~]+~~)((?=\W|\n))', - bygroups(Text, Generic.Deleted, Text)), - # TiddlyWiki variables - (r'<<[^>]+>>', Name.Tag), - (r'\$\$[^$]+\$\$', Name.Tag), - (r'\$\([^)]+\)\$', Name.Tag), - # TiddlyWiki style or class - (r'^@@.*$', Name.Tag), - # HTML tags - (r']+>', Name.Tag), - # inline code - (r'`[^`]+`', String.Backtick), - # HTML escaped symbols - (r'&\S*?;', String.Regex), - # Wiki links - (r'(\[{2})([^]\|]+)(\]{2})', bygroups(Text, Name.Tag, Text)), - # External links - (r'(\[{2})([^]\|]+)(\|)([^]\|]+)(\]{2})', - bygroups(Text, Name.Tag, Text, Name.Attribute, Text)), - # Transclusion - (r'(\{{2})([^}]+)(\}{2})', bygroups(Text, Name.Tag, Text)), - # URLs - (r'(\b.?.?tps?://[^\s"]+)', bygroups(Name.Attribute)), - - # general text, must come last! 
- (r'[\w]+', Text), - (r'.', Text) - ], - } - - def __init__(self, **options): - self.handlecodeblocks = get_bool_opt(options, 'handlecodeblocks', True) - RegexLexer.__init__(self, **options) - - -class WikitextLexer(RegexLexer): - """ - For MediaWiki Wikitext. - - Parsing Wikitext is tricky, and results vary between different MediaWiki - installations, so we only highlight common syntaxes (built-in or from - popular extensions), and also assume templates produce no unbalanced - syntaxes. - - .. versionadded:: 2.15 - """ - name = 'Wikitext' - url = 'https://www.mediawiki.org/wiki/Wikitext' - aliases = ['wikitext', 'mediawiki'] - filenames = [] - mimetypes = ['text/x-wiki'] - flags = re.MULTILINE - - def nowiki_tag_rules(tag_name): - return [ - (r'(?i)()'.format(tag_name), bygroups(Punctuation, - Name.Tag, Whitespace, Punctuation), '#pop'), - include('entity'), - include('text'), - ] - - def plaintext_tag_rules(tag_name): - return [ - (r'(?si)(.*?)()'.format(tag_name), bygroups(Text, - Punctuation, Name.Tag, Whitespace, Punctuation), '#pop'), - ] - - def delegate_tag_rules(tag_name, lexer): - return [ - (r'(?i)()'.format(tag_name), bygroups(Punctuation, - Name.Tag, Whitespace, Punctuation), '#pop'), - (r'(?si).+?(?=)'.format(tag_name), using(lexer)), - ] - - def text_rules(token): - return [ - (r'\w+', token), - (r'[^\S\n]+', token), - (r'(?s).', token), - ] - - def handle_syntaxhighlight(self, match, ctx): - from pygments.lexers import get_lexer_by_name - - attr_content = match.group() - start = 0 - index = 0 - while True: - index = attr_content.find('>', start) - # Exclude comment end (-->) - if attr_content[index-2:index] != '--': - break - start = index + 1 - - if index == -1: - # No tag end - yield from self.get_tokens_unprocessed(attr_content, stack=['root', 'attr']) - return - attr = attr_content[:index] - yield from self.get_tokens_unprocessed(attr, stack=['root', 'attr']) - yield match.start(3) + index, Punctuation, '>' - - lexer = None - content = 
attr_content[index+1:] - lang_match = re.findall(r'\blang=("|\'|)(\w+)(\1)', attr) - - if len(lang_match) >= 1: - # Pick the last match in case of multiple matches - lang = lang_match[-1][1] - try: - lexer = get_lexer_by_name(lang) - except ClassNotFound: - pass - - if lexer is None: - yield match.start() + index + 1, Text, content - else: - yield from lexer.get_tokens_unprocessed(content) - - def handle_score(self, match, ctx): - attr_content = match.group() - start = 0 - index = 0 - while True: - index = attr_content.find('>', start) - # Exclude comment end (-->) - if attr_content[index-2:index] != '--': - break - start = index + 1 - - if index == -1: - # No tag end - yield from self.get_tokens_unprocessed(attr_content, stack=['root', 'attr']) - return - attr = attr_content[:index] - content = attr_content[index+1:] - yield from self.get_tokens_unprocessed(attr, stack=['root', 'attr']) - yield match.start(3) + index, Punctuation, '>' - - lang_match = re.findall(r'\blang=("|\'|)(\w+)(\1)', attr) - # Pick the last match in case of multiple matches - lang = lang_match[-1][1] if len(lang_match) >= 1 else 'lilypond' - - if lang == 'lilypond': # Case sensitive - yield from LilyPondLexer().get_tokens_unprocessed(content) - else: # ABC - # FIXME: Use ABC lexer in the future - yield match.start() + index + 1, Text, content - - # a-z removed to prevent linter from complaining, REMEMBER to use (?i) - title_char = r' %!"$&\'()*,\-./0-9:;=?@A-Z\\\^_`~+\u0080-\uFFFF' - nbsp_char = r'(?:\t| |&\#0*160;|&\#[Xx]0*[Aa]0;|[ \xA0\u1680\u2000-\u200A\u202F\u205F\u3000])' - link_address = r'(?:[0-9.]+|\[[0-9a-f:.]+\]|[^\x00-\x20"<>\[\]\x7F\xA0\u1680\u2000-\u200A\u202F\u205F\u3000\uFFFD])' - link_char_class = r'[^\x00-\x20"<>\[\]\x7F\xA0\u1680\u2000-\u200A\u202F\u205F\u3000\uFFFD]' - double_slashes_i = { - '__FORCETOC__', '__NOCONTENTCONVERT__', '__NOCC__', '__NOEDITSECTION__', '__NOGALLERY__', - '__NOTITLECONVERT__', '__NOTC__', '__NOTOC__', '__TOC__', - } - double_slashes = { - 
'__EXPECTUNUSEDCATEGORY__', '__HIDDENCAT__', '__INDEX__', '__NEWSECTIONLINK__', - '__NOINDEX__', '__NONEWSECTIONLINK__', '__STATICREDIRECT__', '__NOGLOBAL__', - '__DISAMBIG__', '__EXPECTED_UNCONNECTED_PAGE__', - } - protocols = { - 'bitcoin:', 'ftp://', 'ftps://', 'geo:', 'git://', 'gopher://', 'http://', 'https://', - 'irc://', 'ircs://', 'magnet:', 'mailto:', 'mms://', 'news:', 'nntp://', 'redis://', - 'sftp://', 'sip:', 'sips:', 'sms:', 'ssh://', 'svn://', 'tel:', 'telnet://', 'urn:', - 'worldwind://', 'xmpp:', '//', - } - non_relative_protocols = protocols - {'//'} - html_tags = { - 'abbr', 'b', 'bdi', 'bdo', 'big', 'blockquote', 'br', 'caption', 'center', 'cite', 'code', - 'data', 'dd', 'del', 'dfn', 'div', 'dl', 'dt', 'em', 'font', 'h1', 'h2', 'h3', 'h4', 'h5', - 'h6', 'hr', 'i', 'ins', 'kbd', 'li', 'link', 'mark', 'meta', 'ol', 'p', 'q', 'rb', 'rp', - 'rt', 'rtc', 'ruby', 's', 'samp', 'small', 'span', 'strike', 'strong', 'sub', 'sup', - 'table', 'td', 'th', 'time', 'tr', 'tt', 'u', 'ul', 'var', 'wbr', - } - parser_tags = { - 'graph', 'charinsert', 'rss', 'chem', 'categorytree', 'nowiki', 'inputbox', 'math', - 'hiero', 'score', 'pre', 'ref', 'translate', 'imagemap', 'templatestyles', 'languages', - 'noinclude', 'mapframe', 'section', 'poem', 'syntaxhighlight', 'includeonly', 'tvar', - 'onlyinclude', 'templatedata', 'langconvert', 'timeline', 'dynamicpagelist', 'gallery', - 'maplink', 'ce', 'references', - } - variant_langs = { - # ZhConverter.php - 'zh', 'zh-hans', 'zh-hant', 'zh-cn', 'zh-hk', 'zh-mo', 'zh-my', 'zh-sg', 'zh-tw', - # WuuConverter.php - 'wuu', 'wuu-hans', 'wuu-hant', - # UzConverter.php - 'uz', 'uz-latn', 'uz-cyrl', - # TlyConverter.php - 'tly', 'tly-cyrl', - # TgConverter.php - 'tg', 'tg-latn', - # SrConverter.php - 'sr', 'sr-ec', 'sr-el', - # ShiConverter.php - 'shi', 'shi-tfng', 'shi-latn', - # ShConverter.php - 'sh-latn', 'sh-cyrl', - # KuConverter.php - 'ku', 'ku-arab', 'ku-latn', - # KkConverter.php - 'kk', 'kk-cyrl', 'kk-latn', 
'kk-arab', 'kk-kz', 'kk-tr', 'kk-cn', - # IuConverter.php - 'iu', 'ike-cans', 'ike-latn', - # GanConverter.php - 'gan', 'gan-hans', 'gan-hant', - # EnConverter.php - 'en', 'en-x-piglatin', - # CrhConverter.php - 'crh', 'crh-cyrl', 'crh-latn', - # BanConverter.php - 'ban', 'ban-bali', 'ban-x-dharma', 'ban-x-palmleaf', 'ban-x-pku', - } - magic_vars_i = { - 'ARTICLEPATH', 'INT', 'PAGEID', 'SCRIPTPATH', 'SERVER', 'SERVERNAME', 'STYLEPATH', - } - magic_vars = { - '!', '=', 'BASEPAGENAME', 'BASEPAGENAMEE', 'CASCADINGSOURCES', 'CONTENTLANGUAGE', - 'CONTENTLANG', 'CURRENTDAY', 'CURRENTDAY2', 'CURRENTDAYNAME', 'CURRENTDOW', 'CURRENTHOUR', - 'CURRENTMONTH', 'CURRENTMONTH2', 'CURRENTMONTH1', 'CURRENTMONTHABBREV', 'CURRENTMONTHNAME', - 'CURRENTMONTHNAMEGEN', 'CURRENTTIME', 'CURRENTTIMESTAMP', 'CURRENTVERSION', 'CURRENTWEEK', - 'CURRENTYEAR', 'DIRECTIONMARK', 'DIRMARK', 'FULLPAGENAME', 'FULLPAGENAMEE', 'LOCALDAY', - 'LOCALDAY2', 'LOCALDAYNAME', 'LOCALDOW', 'LOCALHOUR', 'LOCALMONTH', 'LOCALMONTH2', - 'LOCALMONTH1', 'LOCALMONTHABBREV', 'LOCALMONTHNAME', 'LOCALMONTHNAMEGEN', 'LOCALTIME', - 'LOCALTIMESTAMP', 'LOCALWEEK', 'LOCALYEAR', 'NAMESPACE', 'NAMESPACEE', 'NAMESPACENUMBER', - 'NUMBEROFACTIVEUSERS', 'NUMBEROFADMINS', 'NUMBEROFARTICLES', 'NUMBEROFEDITS', - 'NUMBEROFFILES', 'NUMBEROFPAGES', 'NUMBEROFUSERS', 'PAGELANGUAGE', 'PAGENAME', 'PAGENAMEE', - 'REVISIONDAY', 'REVISIONDAY2', 'REVISIONID', 'REVISIONMONTH', 'REVISIONMONTH1', - 'REVISIONSIZE', 'REVISIONTIMESTAMP', 'REVISIONUSER', 'REVISIONYEAR', 'ROOTPAGENAME', - 'ROOTPAGENAMEE', 'SITENAME', 'SUBJECTPAGENAME', 'ARTICLEPAGENAME', 'SUBJECTPAGENAMEE', - 'ARTICLEPAGENAMEE', 'SUBJECTSPACE', 'ARTICLESPACE', 'SUBJECTSPACEE', 'ARTICLESPACEE', - 'SUBPAGENAME', 'SUBPAGENAMEE', 'TALKPAGENAME', 'TALKPAGENAMEE', 'TALKSPACE', 'TALKSPACEE', - } - parser_functions_i = { - 'ANCHORENCODE', 'BIDI', 'CANONICALURL', 'CANONICALURLE', 'FILEPATH', 'FORMATNUM', - 'FULLURL', 'FULLURLE', 'GENDER', 'GRAMMAR', 'INT', r'\#LANGUAGE', 'LC', 'LCFIRST', 
'LOCALURL', - 'LOCALURLE', 'NS', 'NSE', 'PADLEFT', 'PADRIGHT', 'PAGEID', 'PLURAL', 'UC', 'UCFIRST', - 'URLENCODE', - } - parser_functions = { - 'BASEPAGENAME', 'BASEPAGENAMEE', 'CASCADINGSOURCES', 'DEFAULTSORT', 'DEFAULTSORTKEY', - 'DEFAULTCATEGORYSORT', 'FULLPAGENAME', 'FULLPAGENAMEE', 'NAMESPACE', 'NAMESPACEE', - 'NAMESPACENUMBER', 'NUMBERINGROUP', 'NUMINGROUP', 'NUMBEROFACTIVEUSERS', 'NUMBEROFADMINS', - 'NUMBEROFARTICLES', 'NUMBEROFEDITS', 'NUMBEROFFILES', 'NUMBEROFPAGES', 'NUMBEROFUSERS', - 'PAGENAME', 'PAGENAMEE', 'PAGESINCATEGORY', 'PAGESINCAT', 'PAGESIZE', 'PROTECTIONEXPIRY', - 'PROTECTIONLEVEL', 'REVISIONDAY', 'REVISIONDAY2', 'REVISIONID', 'REVISIONMONTH', - 'REVISIONMONTH1', 'REVISIONTIMESTAMP', 'REVISIONUSER', 'REVISIONYEAR', 'ROOTPAGENAME', - 'ROOTPAGENAMEE', 'SUBJECTPAGENAME', 'ARTICLEPAGENAME', 'SUBJECTPAGENAMEE', - 'ARTICLEPAGENAMEE', 'SUBJECTSPACE', 'ARTICLESPACE', 'SUBJECTSPACEE', 'ARTICLESPACEE', - 'SUBPAGENAME', 'SUBPAGENAMEE', 'TALKPAGENAME', 'TALKPAGENAMEE', 'TALKSPACE', 'TALKSPACEE', - 'INT', 'DISPLAYTITLE', 'PAGESINNAMESPACE', 'PAGESINNS', - } - - tokens = { - 'root': [ - # Redirects - (r"""(?xi) - (\A\s*?)(\#REDIRECT:?) # may contain a colon - (\s+)(\[\[) (?=[^\]\n]* \]\]$) - """, - bygroups(Whitespace, Keyword, Whitespace, Punctuation), 'redirect-inner'), - # Subheadings - (r'^(={2,6})(.+?)(\1)(\s*$\n)', - bygroups(Generic.Subheading, Generic.Subheading, Generic.Subheading, Whitespace)), - # Headings - (r'^(=.+?=)(\s*$\n)', - bygroups(Generic.Heading, Whitespace)), - # Double-slashed magic words - (words(double_slashes_i, prefix=r'(?i)'), Name.Function.Magic), - (words(double_slashes), Name.Function.Magic), - # Raw URLs - (r'(?i)\b(?:{}){}{}*'.format('|'.join(protocols), - link_address, link_char_class), Name.Label), - # Magic links - (r'\b(?:RFC|PMID){}+[0-9]+\b'.format(nbsp_char), - Name.Function.Magic), - (r"""(?x) - \bISBN {nbsp_char} - (?: 97[89] {nbsp_dash}? )? - (?: [0-9] {nbsp_dash}? 
){{9}} # escape format() - [0-9Xx]\b - """.format(nbsp_char=nbsp_char, nbsp_dash=f'(?:-|{nbsp_char})'), Name.Function.Magic), - include('list'), - include('inline'), - include('text'), - ], - 'redirect-inner': [ - (r'(\]\])(\s*?\n)', bygroups(Punctuation, Whitespace), '#pop'), - (r'(\#)([^#]*?)', bygroups(Punctuation, Name.Label)), - (r'(?i)[{}]+'.format(title_char), Name.Tag), - ], - 'list': [ - # Description lists - (r'^;', Keyword, 'dt'), - # Ordered lists, unordered lists and indents - (r'^[#:*]+', Keyword), - # Horizontal rules - (r'^-{4,}', Keyword), - ], - 'inline': [ - # Signatures - (r'~{3,5}', Keyword), - # Entities - include('entity'), - # Bold & italic - (r"('')(''')(?!')", bygroups(Generic.Emph, - Generic.EmphStrong), 'inline-italic-bold'), - (r"'''(?!')", Generic.Strong, 'inline-bold'), - (r"''(?!')", Generic.Emph, 'inline-italic'), - # Comments & parameters & templates - include('replaceable'), - # Media links - ( - r"""(?xi) - (\[\[) - (File|Image) (:) - ((?: [%s] | \{{2,3}[^{}]*?\}{2,3} | )*) - (?: (\#) ([%s]*?) )? - """ % (title_char, f'{title_char}#'), - bygroups(Punctuation, Name.Namespace, Punctuation, - using(this, state=['wikilink-name']), Punctuation, Name.Label), - 'medialink-inner' - ), - # Wikilinks - ( - r"""(?xi) - (\[\[)(?!%s) # Should not contain URLs - (?: ([%s]*) (:))? - ((?: [%s] | \{{2,3}[^{}]*?\}{2,3} | )*?) - (?: (\#) ([%s]*?) )? - (\]\]) - """ % ('|'.join(protocols), title_char.replace('/', ''), - title_char, f'{title_char}#'), - bygroups(Punctuation, Name.Namespace, Punctuation, - using(this, state=['wikilink-name']), Punctuation, Name.Label, Punctuation) - ), - ( - r"""(?xi) - (\[\[)(?!%s) - (?: ([%s]*) (:))? - ((?: [%s] | \{{2,3}[^{}]*?\}{2,3} | )*?) - (?: (\#) ([%s]*?) )? 
- (\|) - """ % ('|'.join(protocols), title_char.replace('/', ''), - title_char, f'{title_char}#'), - bygroups(Punctuation, Name.Namespace, Punctuation, - using(this, state=['wikilink-name']), Punctuation, Name.Label, Punctuation), - 'wikilink-inner' - ), - # External links - ( - r"""(?xi) - (\[) - ((?:{}) {} {}*) - (\s*) - """.format('|'.join(protocols), link_address, link_char_class), - bygroups(Punctuation, Name.Label, Whitespace), - 'extlink-inner' - ), - # Tables - (r'^(:*)(\s*?)(\{\|)([^\n]*)$', bygroups(Keyword, - Whitespace, Punctuation, using(this, state=['root', 'attr'])), 'table'), - # HTML tags - (r'(?i)(<)({})\b'.format('|'.join(html_tags)), - bygroups(Punctuation, Name.Tag), 'tag-inner-ordinary'), - (r'(?i)()'.format('|'.join(html_tags)), - bygroups(Punctuation, Name.Tag, Whitespace, Punctuation)), - # - (r'(?i)(<)(nowiki)\b', bygroups(Punctuation, - Name.Tag), ('tag-nowiki', 'tag-inner')), - #
-            (r'(?i)(<)(pre)\b', bygroups(Punctuation,
-             Name.Tag), ('tag-pre', 'tag-inner')),
-            # 
-            (r'(?i)(<)(categorytree)\b', bygroups(
-                Punctuation, Name.Tag), ('tag-categorytree', 'tag-inner')),
-            # 
-            (r'(?i)(<)(hiero)\b', bygroups(Punctuation,
-             Name.Tag), ('tag-hiero', 'tag-inner')),
-            # 
-            (r'(?i)(<)(math)\b', bygroups(Punctuation,
-             Name.Tag), ('tag-math', 'tag-inner')),
-            # 
-            (r'(?i)(<)(chem)\b', bygroups(Punctuation,
-             Name.Tag), ('tag-chem', 'tag-inner')),
-            # 
-            (r'(?i)(<)(ce)\b', bygroups(Punctuation,
-             Name.Tag), ('tag-ce', 'tag-inner')),
-            # 
-            (r'(?i)(<)(charinsert)\b', bygroups(
-                Punctuation, Name.Tag), ('tag-charinsert', 'tag-inner')),
-            # 
-            (r'(?i)(<)(templatedata)\b', bygroups(
-                Punctuation, Name.Tag), ('tag-templatedata', 'tag-inner')),
-            # 
-            (r'(?i)(<)(gallery)\b', bygroups(
-                Punctuation, Name.Tag), ('tag-gallery', 'tag-inner')),
-            # 
-            (r'(?i)(<)(gallery)\b', bygroups(
-                Punctuation, Name.Tag), ('tag-graph', 'tag-inner')),
-            # 
-            (r'(?i)(<)(dynamicpagelist)\b', bygroups(
-                Punctuation, Name.Tag), ('tag-dynamicpagelist', 'tag-inner')),
-            # 
-            (r'(?i)(<)(inputbox)\b', bygroups(
-                Punctuation, Name.Tag), ('tag-inputbox', 'tag-inner')),
-            # 
-            (r'(?i)(<)(rss)\b', bygroups(
-                Punctuation, Name.Tag), ('tag-rss', 'tag-inner')),
-            # 
-            (r'(?i)(<)(imagemap)\b', bygroups(
-                Punctuation, Name.Tag), ('tag-imagemap', 'tag-inner')),
-            # 
-            (r'(?i)()',
-             bygroups(Punctuation, Name.Tag, Whitespace, Punctuation)),
-            (r'(?si)(<)(syntaxhighlight)\b([^>]*?(?.*?)(?=)',
-             bygroups(Punctuation, Name.Tag, handle_syntaxhighlight)),
-            # : Fallback case for self-closing tags
-            (r'(?i)(<)(syntaxhighlight)\b(\s*?)((?:[^>]|-->)*?)(/\s*?(?)*?)(/\s*?(?)*?)(/\s*?(?|\Z)', Comment.Multiline),
-            # Parameters
-            (
-                r"""(?x)
-                (\{{3})
-                    ([^|]*?)
-                    (?=\}{3}|\|)
-                """,
-                bygroups(Punctuation, Name.Variable),
-                'parameter-inner',
-            ),
-            # Magic variables
-            (r'(?i)(\{\{)(\s*)(%s)(\s*)(\}\})' % '|'.join(magic_vars_i),
-             bygroups(Punctuation, Whitespace, Name.Function, Whitespace, Punctuation)),
-            (r'(\{\{)(\s*)(%s)(\s*)(\}\})' % '|'.join(magic_vars),
-                bygroups(Punctuation, Whitespace, Name.Function, Whitespace, Punctuation)),
-            # Parser functions & templates
-            (r'\{\{', Punctuation, 'template-begin-space'),
-            #  legacy syntax
-            (r'(?i)(<)(tvar)\b(\|)([^>]*?)(>)', bygroups(Punctuation,
-             Name.Tag, Punctuation, String, Punctuation)),
-            (r'', Punctuation, '#pop'),
-            # 
-            (r'(?i)(<)(tvar)\b', bygroups(Punctuation, Name.Tag), 'tag-inner-ordinary'),
-            (r'(?i)()',
-             bygroups(Punctuation, Name.Tag, Whitespace, Punctuation)),
-        ],
-        'parameter-inner': [
-            (r'\}{3}', Punctuation, '#pop'),
-            (r'\|', Punctuation),
-            include('inline'),
-            include('text'),
-        ],
-        'template-begin-space': [
-            # Templates allow line breaks at the beginning, and due to how MediaWiki handles
-            # comments, an extra state is required to handle things like {{\n\n name}}
-            (r'|\Z)', Comment.Multiline),
-            (r'\s+', Whitespace),
-            # Parser functions
-            (
-                r'(?i)(\#[%s]*?|%s)(:)' % (title_char,
-                                           '|'.join(parser_functions_i)),
-                bygroups(Name.Function, Punctuation), ('#pop', 'template-inner')
-            ),
-            (
-                r'(%s)(:)' % ('|'.join(parser_functions)),
-                bygroups(Name.Function, Punctuation), ('#pop', 'template-inner')
-            ),
-            # Templates
-            (
-                r'(?i)([%s]*?)(:)' % title_char,
-                bygroups(Name.Namespace, Punctuation), ('#pop', 'template-name')
-            ),
-            default(('#pop', 'template-name'),),
-        ],
-        'template-name': [
-            (r'(\s*?)(\|)', bygroups(Text, Punctuation), ('#pop', 'template-inner')),
-            (r'\}\}', Punctuation, '#pop'),
-            (r'\n', Text, '#pop'),
-            include('replaceable'),
-            *text_rules(Name.Tag),
-        ],
-        'template-inner': [
-            (r'\}\}', Punctuation, '#pop'),
-            (r'\|', Punctuation),
-            (
-                r"""(?x)
-                    (?<=\|)
-                    ( (?: (?! \{\{ | \}\} )[^=\|<])*? ) # Exclude templates and tags
-                    (=)
-                """,
-                bygroups(Name.Label, Operator)
-            ),
-            include('inline'),
-            include('text'),
-        ],
-        'table': [
-            # Use [ \t\n\r\0\x0B] instead of \s to follow PHP trim() behavior
-            # Endings
-            (r'^([ \t\n\r\0\x0B]*?)(\|\})',
-             bygroups(Whitespace, Punctuation), '#pop'),
-            # Table rows
-            (r'^([ \t\n\r\0\x0B]*?)(\|-+)(.*)$', bygroups(Whitespace, Punctuation,
-             using(this, state=['root', 'attr']))),
-            # Captions
-            (
-                r"""(?x)
-                ^([ \t\n\r\0\x0B]*?)(\|\+)
-                # Exclude links, template and tags
-                (?: ( (?: (?! \[\[ | \{\{ )[^|\n<] )*? )(\|) )?
-                (.*?)$
-                """,
-                bygroups(Whitespace, Punctuation, using(this, state=[
-                         'root', 'attr']), Punctuation, Generic.Heading),
-            ),
-            # Table data
-            (
-                r"""(?x)
-                ( ^(?:[ \t\n\r\0\x0B]*?)\| | \|\| )
-                (?: ( (?: (?! \[\[ | \{\{ )[^|\n<] )*? )(\|)(?!\|) )?
-                """,
-                bygroups(Punctuation, using(this, state=[
-                         'root', 'attr']), Punctuation),
-            ),
-            # Table headers
-            (
-                r"""(?x)
-                ( ^(?:[ \t\n\r\0\x0B]*?)!  )
-                (?: ( (?: (?! \[\[ | \{\{ )[^|\n<] )*? )(\|)(?!\|) )?
-                """,
-                bygroups(Punctuation, using(this, state=[
-                         'root', 'attr']), Punctuation),
-                'table-header',
-            ),
-            include('list'),
-            include('inline'),
-            include('text'),
-        ],
-        'table-header': [
-            # Requires another state for || handling inside headers
-            (r'\n', Text, '#pop'),
-            (
-                r"""(?x)
-                (!!|\|\|)
-                (?:
-                    ( (?: (?! \[\[ | \{\{ )[^|\n<] )*? )
-                    (\|)(?!\|)
-                )?
-                """,
-                bygroups(Punctuation, using(this, state=[
-                         'root', 'attr']), Punctuation)
-            ),
-            *text_rules(Generic.Subheading),
-        ],
-        'entity': [
-            (r'&\S*?;', Name.Entity),
-        ],
-        'dt': [
-            (r'\n', Text, '#pop'),
-            include('inline'),
-            (r':', Keyword, '#pop'),
-            include('text'),
-        ],
-        'extlink-inner': [
-            (r'\]', Punctuation, '#pop'),
-            include('inline'),
-            include('text'),
-        ],
-        'nowiki-ish': [
-            include('entity'),
-            include('text'),
-        ],
-        'attr': [
-            include('replaceable'),
-            (r'\s+', Whitespace),
-            (r'(=)(\s*)(")', bygroups(Operator, Whitespace, String.Double), 'attr-val-2'),
-            (r"(=)(\s*)(')", bygroups(Operator, Whitespace, String.Single), 'attr-val-1'),
-            (r'(=)(\s*)', bygroups(Operator, Whitespace), 'attr-val-0'),
-            (r'[\w:-]+', Name.Attribute),
-
-        ],
-        'attr-val-0': [
-            (r'\s', Whitespace, '#pop'),
-            include('replaceable'),
-            *text_rules(String),
-        ],
-        'attr-val-1': [
-            (r"'", String.Single, '#pop'),
-            include('replaceable'),
-            *text_rules(String.Single),
-        ],
-        'attr-val-2': [
-            (r'"', String.Double, '#pop'),
-            include('replaceable'),
-            *text_rules(String.Double),
-        ],
-        'tag-inner-ordinary': [
-            (r'/?\s*>', Punctuation, '#pop'),
-            include('tag-attr'),
-        ],
-        'tag-inner': [
-            # Return to root state for self-closing tags
-            (r'/\s*>', Punctuation, '#pop:2'),
-            (r'\s*>', Punctuation, '#pop'),
-            include('tag-attr'),
-        ],
-        # There states below are just like their non-tag variants, the key difference is
-        # they forcibly quit when encountering tag closing markup
-        'tag-attr': [
-            include('replaceable'),
-            (r'\s+', Whitespace),
-            (r'(=)(\s*)(")', bygroups(Operator,
-             Whitespace, String.Double), 'tag-attr-val-2'),
-            (r"(=)(\s*)(')", bygroups(Operator,
-             Whitespace, String.Single), 'tag-attr-val-1'),
-            (r'(=)(\s*)', bygroups(Operator, Whitespace), 'tag-attr-val-0'),
-            (r'[\w:-]+', Name.Attribute),
-
-        ],
-        'tag-attr-val-0': [
-            (r'\s', Whitespace, '#pop'),
-            (r'/?>', Punctuation, '#pop:2'),
-            include('replaceable'),
-            *text_rules(String),
-        ],
-        'tag-attr-val-1': [
-            (r"'", String.Single, '#pop'),
-            (r'/?>', Punctuation, '#pop:2'),
-            include('replaceable'),
-            *text_rules(String.Single),
-        ],
-        'tag-attr-val-2': [
-            (r'"', String.Double, '#pop'),
-            (r'/?>', Punctuation, '#pop:2'),
-            include('replaceable'),
-            *text_rules(String.Double),
-        ],
-        'tag-nowiki': nowiki_tag_rules('nowiki'),
-        'tag-pre': nowiki_tag_rules('pre'),
-        'tag-categorytree': plaintext_tag_rules('categorytree'),
-        'tag-dynamicpagelist': plaintext_tag_rules('dynamicpagelist'),
-        'tag-hiero': plaintext_tag_rules('hiero'),
-        'tag-inputbox': plaintext_tag_rules('inputbox'),
-        'tag-imagemap': plaintext_tag_rules('imagemap'),
-        'tag-charinsert': plaintext_tag_rules('charinsert'),
-        'tag-timeline': plaintext_tag_rules('timeline'),
-        'tag-gallery': plaintext_tag_rules('gallery'),
-        'tag-graph': plaintext_tag_rules('graph'),
-        'tag-rss': plaintext_tag_rules('rss'),
-        'tag-math': delegate_tag_rules('math', TexLexer),
-        'tag-chem': delegate_tag_rules('chem', TexLexer),
-        'tag-ce': delegate_tag_rules('ce', TexLexer),
-        'tag-templatedata': delegate_tag_rules('templatedata', JsonLexer),
-        'text-italic': text_rules(Generic.Emph),
-        'text-bold': text_rules(Generic.Strong),
-        'text-bold-italic': text_rules(Generic.EmphStrong),
-        'text': text_rules(Text),
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/math.py b/venv/lib/python3.11/site-packages/pygments/lexers/math.py
deleted file mode 100644
index 530f853..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/math.py
+++ /dev/null
@@ -1,20 +0,0 @@
-"""
-    pygments.lexers.math
-    ~~~~~~~~~~~~~~~~~~~~
-
-    Just export lexers that were contained in this module.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexers.python import NumPyLexer
-from pygments.lexers.matlab import MatlabLexer, MatlabSessionLexer, \
-    OctaveLexer, ScilabLexer
-from pygments.lexers.julia import JuliaLexer, JuliaConsoleLexer
-from pygments.lexers.r import RConsoleLexer, SLexer, RdLexer
-from pygments.lexers.modeling import BugsLexer, JagsLexer, StanLexer
-from pygments.lexers.idl import IDLLexer
-from pygments.lexers.algebra import MuPADLexer
-
-__all__ = []
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/matlab.py b/venv/lib/python3.11/site-packages/pygments/lexers/matlab.py
deleted file mode 100644
index 753a6ef..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/matlab.py
+++ /dev/null
@@ -1,3308 +0,0 @@
-"""
-    pygments.lexers.matlab
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for Matlab and related languages.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, bygroups, default, words, \
-    do_insertions, include
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation, Generic, Whitespace
-
-from pygments.lexers import _scilab_builtins
-
-__all__ = ['MatlabLexer', 'MatlabSessionLexer', 'OctaveLexer', 'ScilabLexer']
-
-
-class MatlabLexer(RegexLexer):
-    """
-    For Matlab source code.
-
-    .. versionadded:: 0.10
-    """
-    name = 'Matlab'
-    aliases = ['matlab']
-    filenames = ['*.m']
-    mimetypes = ['text/matlab']
-
-    _operators = r'-|==|~=|<=|>=|<|>|&&|&|~|\|\|?|\.\*|\*|\+|\.\^|\.\\|\./|/|\\'
-
-    tokens = {
-        'expressions': [
-            # operators:
-            (_operators, Operator),
-
-            # numbers (must come before punctuation to handle `.5`; cannot use
-            # `\b` due to e.g. `5. + .5`).  The negative lookahead on operators
-            # avoids including the dot in `1./x` (the dot is part of `./`).
-            (r'(? and then
-            # (equal | open-parenthesis |  | ).
-            (r'(?:^|(?<=;))(\s*)(\w+)(\s+)(?!=|\(|%s\s|\s)' % _operators,
-             bygroups(Whitespace, Name, Whitespace), 'commandargs'),
-
-            include('expressions')
-        ],
-        'blockcomment': [
-            (r'^\s*%\}', Comment.Multiline, '#pop'),
-            (r'^.*\n', Comment.Multiline),
-            (r'.', Comment.Multiline),
-        ],
-        'deffunc': [
-            (r'(\s*)(?:(\S+)(\s*)(=)(\s*))?(.+)(\()(.*)(\))(\s*)',
-             bygroups(Whitespace, Text, Whitespace, Punctuation,
-                      Whitespace, Name.Function, Punctuation, Text,
-                      Punctuation, Whitespace), '#pop'),
-            # function with no args
-            (r'(\s*)([a-zA-Z_]\w*)',
-             bygroups(Whitespace, Name.Function), '#pop'),
-        ],
-        'propattrs': [
-            (r'(\w+)(\s*)(=)(\s*)(\d+)',
-             bygroups(Name.Builtin, Whitespace, Punctuation, Whitespace,
-                      Number)),
-            (r'(\w+)(\s*)(=)(\s*)([a-zA-Z]\w*)',
-             bygroups(Name.Builtin, Whitespace, Punctuation, Whitespace,
-                      Keyword)),
-            (r',', Punctuation),
-            (r'\)', Punctuation, '#pop'),
-            (r'\s+', Whitespace),
-            (r'.', Text),
-        ],
-        'defprops': [
-            (r'%\{\s*\n', Comment.Multiline, 'blockcomment'),
-            (r'%.*$', Comment),
-            (r'(?.
-
-    .. versionadded:: 0.10
-    """
-    name = 'Matlab session'
-    aliases = ['matlabsession']
-
-    def get_tokens_unprocessed(self, text):
-        mlexer = MatlabLexer(**self.options)
-
-        curcode = ''
-        insertions = []
-        continuation = False
-
-        for match in line_re.finditer(text):
-            line = match.group()
-
-            if line.startswith('>> '):
-                insertions.append((len(curcode),
-                                   [(0, Generic.Prompt, line[:3])]))
-                curcode += line[3:]
-
-            elif line.startswith('>>'):
-                insertions.append((len(curcode),
-                                   [(0, Generic.Prompt, line[:2])]))
-                curcode += line[2:]
-
-            elif line.startswith('???'):
-
-                idx = len(curcode)
-
-                # without is showing error on same line as before...?
-                # line = "\n" + line
-                token = (0, Generic.Traceback, line)
-                insertions.append((idx, [token]))
-            elif continuation and insertions:
-                # line_start is the length of the most recent prompt symbol
-                line_start = len(insertions[-1][-1][-1])
-                # Set leading spaces with the length of the prompt to be a generic prompt
-                # This keeps code aligned when prompts are removed, say with some Javascript
-                if line.startswith(' '*line_start):
-                    insertions.append(
-                        (len(curcode), [(0, Generic.Prompt, line[:line_start])]))
-                    curcode += line[line_start:]
-                else:
-                    curcode += line
-            else:
-                if curcode:
-                    yield from do_insertions(
-                        insertions, mlexer.get_tokens_unprocessed(curcode))
-                    curcode = ''
-                    insertions = []
-
-                yield match.start(), Generic.Output, line
-
-            # Does not allow continuation if a comment is included after the ellipses.
-            # Continues any line that ends with ..., even comments (lines that start with %)
-            if line.strip().endswith('...'):
-                continuation = True
-            else:
-                continuation = False
-
-        if curcode:  # or item:
-            yield from do_insertions(
-                insertions, mlexer.get_tokens_unprocessed(curcode))
-
-
-class OctaveLexer(RegexLexer):
-    """
-    For GNU Octave source code.
-
-    .. versionadded:: 1.5
-    """
-    name = 'Octave'
-    url = 'https://www.gnu.org/software/octave/index'
-    aliases = ['octave']
-    filenames = ['*.m']
-    mimetypes = ['text/octave']
-
-    # These lists are generated automatically.
-    # Run the following in bash shell:
-    #
-    # First dump all of the Octave manual into a plain text file:
-    #
-    #   $ info octave --subnodes -o octave-manual
-    #
-    # Now grep through it:
-
-    # for i in \
-    #     "Built-in Function" "Command" "Function File" \
-    #     "Loadable Function" "Mapping Function";
-    # do
-    #     perl -e '@name = qw('"$i"');
-    #              print lc($name[0]),"_kw = [\n"';
-    #
-    #     perl -n -e 'print "\"$1\",\n" if /-- '"$i"': .* (\w*) \(/;' \
-    #         octave-manual | sort | uniq ;
-    #     echo "]" ;
-    #     echo;
-    # done
-
-    # taken from Octave Mercurial changeset 8cc154f45e37 (30-jan-2011)
-
-    builtin_kw = (
-        "addlistener", "addpath", "addproperty", "all",
-        "and", "any", "argnames", "argv", "assignin",
-        "atexit", "autoload",
-        "available_graphics_toolkits", "beep_on_error",
-        "bitand", "bitmax", "bitor", "bitshift", "bitxor",
-        "cat", "cell", "cellstr", "char", "class", "clc",
-        "columns", "command_line_path",
-        "completion_append_char", "completion_matches",
-        "complex", "confirm_recursive_rmdir", "cputime",
-        "crash_dumps_octave_core", "ctranspose", "cumprod",
-        "cumsum", "debug_on_error", "debug_on_interrupt",
-        "debug_on_warning", "default_save_options",
-        "dellistener", "diag", "diff", "disp",
-        "doc_cache_file", "do_string_escapes", "double",
-        "drawnow", "e", "echo_executing_commands", "eps",
-        "eq", "errno", "errno_list", "error", "eval",
-        "evalin", "exec", "exist", "exit", "eye", "false",
-        "fclear", "fclose", "fcntl", "fdisp", "feof",
-        "ferror", "feval", "fflush", "fgetl", "fgets",
-        "fieldnames", "file_in_loadpath", "file_in_path",
-        "filemarker", "filesep", "find_dir_in_path",
-        "fixed_point_format", "fnmatch", "fopen", "fork",
-        "formula", "fprintf", "fputs", "fread", "freport",
-        "frewind", "fscanf", "fseek", "fskipl", "ftell",
-        "functions", "fwrite", "ge", "genpath", "get",
-        "getegid", "getenv", "geteuid", "getgid",
-        "getpgrp", "getpid", "getppid", "getuid", "glob",
-        "gt", "gui_mode", "history_control",
-        "history_file", "history_size",
-        "history_timestamp_format_string", "home",
-        "horzcat", "hypot", "ifelse",
-        "ignore_function_time_stamp", "inferiorto",
-        "info_file", "info_program", "inline", "input",
-        "intmax", "intmin", "ipermute",
-        "is_absolute_filename", "isargout", "isbool",
-        "iscell", "iscellstr", "ischar", "iscomplex",
-        "isempty", "isfield", "isfloat", "isglobal",
-        "ishandle", "isieee", "isindex", "isinteger",
-        "islogical", "ismatrix", "ismethod", "isnull",
-        "isnumeric", "isobject", "isreal",
-        "is_rooted_relative_filename", "issorted",
-        "isstruct", "isvarname", "kbhit", "keyboard",
-        "kill", "lasterr", "lasterror", "lastwarn",
-        "ldivide", "le", "length", "link", "linspace",
-        "logical", "lstat", "lt", "make_absolute_filename",
-        "makeinfo_program", "max_recursion_depth", "merge",
-        "methods", "mfilename", "minus", "mislocked",
-        "mkdir", "mkfifo", "mkstemp", "mldivide", "mlock",
-        "mouse_wheel_zoom", "mpower", "mrdivide", "mtimes",
-        "munlock", "nargin", "nargout",
-        "native_float_format", "ndims", "ne", "nfields",
-        "nnz", "norm", "not", "numel", "nzmax",
-        "octave_config_info", "octave_core_file_limit",
-        "octave_core_file_name",
-        "octave_core_file_options", "ones", "or",
-        "output_max_field_width", "output_precision",
-        "page_output_immediately", "page_screen_output",
-        "path", "pathsep", "pause", "pclose", "permute",
-        "pi", "pipe", "plus", "popen", "power",
-        "print_empty_dimensions", "printf",
-        "print_struct_array_contents", "prod",
-        "program_invocation_name", "program_name",
-        "putenv", "puts", "pwd", "quit", "rats", "rdivide",
-        "readdir", "readlink", "read_readline_init_file",
-        "realmax", "realmin", "rehash", "rename",
-        "repelems", "re_read_readline_init_file", "reset",
-        "reshape", "resize", "restoredefaultpath",
-        "rethrow", "rmdir", "rmfield", "rmpath", "rows",
-        "save_header_format_string", "save_precision",
-        "saving_history", "scanf", "set", "setenv",
-        "shell_cmd", "sighup_dumps_octave_core",
-        "sigterm_dumps_octave_core", "silent_functions",
-        "single", "size", "size_equal", "sizemax",
-        "sizeof", "sleep", "source", "sparse_auto_mutate",
-        "split_long_rows", "sprintf", "squeeze", "sscanf",
-        "stat", "stderr", "stdin", "stdout", "strcmp",
-        "strcmpi", "string_fill_char", "strncmp",
-        "strncmpi", "struct", "struct_levels_to_print",
-        "strvcat", "subsasgn", "subsref", "sum", "sumsq",
-        "superiorto", "suppress_verbose_help_message",
-        "symlink", "system", "tic", "tilde_expand",
-        "times", "tmpfile", "tmpnam", "toc", "toupper",
-        "transpose", "true", "typeinfo", "umask", "uminus",
-        "uname", "undo_string_escapes", "unlink", "uplus",
-        "upper", "usage", "usleep", "vec", "vectorize",
-        "vertcat", "waitpid", "warning", "warranty",
-        "whos_line_format", "yes_or_no", "zeros",
-        "inf", "Inf", "nan", "NaN")
-
-    command_kw = ("close", "load", "who", "whos")
-
-    function_kw = (
-        "accumarray", "accumdim", "acosd", "acotd",
-        "acscd", "addtodate", "allchild", "ancestor",
-        "anova", "arch_fit", "arch_rnd", "arch_test",
-        "area", "arma_rnd", "arrayfun", "ascii", "asctime",
-        "asecd", "asind", "assert", "atand",
-        "autoreg_matrix", "autumn", "axes", "axis", "bar",
-        "barh", "bartlett", "bartlett_test", "beep",
-        "betacdf", "betainv", "betapdf", "betarnd",
-        "bicgstab", "bicubic", "binary", "binocdf",
-        "binoinv", "binopdf", "binornd", "bitcmp",
-        "bitget", "bitset", "blackman", "blanks",
-        "blkdiag", "bone", "box", "brighten", "calendar",
-        "cast", "cauchy_cdf", "cauchy_inv", "cauchy_pdf",
-        "cauchy_rnd", "caxis", "celldisp", "center", "cgs",
-        "chisquare_test_homogeneity",
-        "chisquare_test_independence", "circshift", "cla",
-        "clabel", "clf", "clock", "cloglog", "closereq",
-        "colon", "colorbar", "colormap", "colperm",
-        "comet", "common_size", "commutation_matrix",
-        "compan", "compare_versions", "compass",
-        "computer", "cond", "condest", "contour",
-        "contourc", "contourf", "contrast", "conv",
-        "convhull", "cool", "copper", "copyfile", "cor",
-        "corrcoef", "cor_test", "cosd", "cotd", "cov",
-        "cplxpair", "cross", "cscd", "cstrcat", "csvread",
-        "csvwrite", "ctime", "cumtrapz", "curl", "cut",
-        "cylinder", "date", "datenum", "datestr",
-        "datetick", "datevec", "dblquad", "deal",
-        "deblank", "deconv", "delaunay", "delaunayn",
-        "delete", "demo", "detrend", "diffpara", "diffuse",
-        "dir", "discrete_cdf", "discrete_inv",
-        "discrete_pdf", "discrete_rnd", "display",
-        "divergence", "dlmwrite", "dos", "dsearch",
-        "dsearchn", "duplication_matrix", "durbinlevinson",
-        "ellipsoid", "empirical_cdf", "empirical_inv",
-        "empirical_pdf", "empirical_rnd", "eomday",
-        "errorbar", "etime", "etreeplot", "example",
-        "expcdf", "expinv", "expm", "exppdf", "exprnd",
-        "ezcontour", "ezcontourf", "ezmesh", "ezmeshc",
-        "ezplot", "ezpolar", "ezsurf", "ezsurfc", "factor",
-        "factorial", "fail", "fcdf", "feather", "fftconv",
-        "fftfilt", "fftshift", "figure", "fileattrib",
-        "fileparts", "fill", "findall", "findobj",
-        "findstr", "finv", "flag", "flipdim", "fliplr",
-        "flipud", "fpdf", "fplot", "fractdiff", "freqz",
-        "freqz_plot", "frnd", "fsolve",
-        "f_test_regression", "ftp", "fullfile", "fzero",
-        "gamcdf", "gaminv", "gampdf", "gamrnd", "gca",
-        "gcbf", "gcbo", "gcf", "genvarname", "geocdf",
-        "geoinv", "geopdf", "geornd", "getfield", "ginput",
-        "glpk", "gls", "gplot", "gradient",
-        "graphics_toolkit", "gray", "grid", "griddata",
-        "griddatan", "gtext", "gunzip", "gzip", "hadamard",
-        "hamming", "hankel", "hanning", "hggroup",
-        "hidden", "hilb", "hist", "histc", "hold", "hot",
-        "hotelling_test", "housh", "hsv", "hurst",
-        "hygecdf", "hygeinv", "hygepdf", "hygernd",
-        "idivide", "ifftshift", "image", "imagesc",
-        "imfinfo", "imread", "imshow", "imwrite", "index",
-        "info", "inpolygon", "inputname", "interpft",
-        "interpn", "intersect", "invhilb", "iqr", "isa",
-        "isdefinite", "isdir", "is_duplicate_entry",
-        "isequal", "isequalwithequalnans", "isfigure",
-        "ishermitian", "ishghandle", "is_leap_year",
-        "isletter", "ismac", "ismember", "ispc", "isprime",
-        "isprop", "isscalar", "issquare", "isstrprop",
-        "issymmetric", "isunix", "is_valid_file_id",
-        "isvector", "jet", "kendall",
-        "kolmogorov_smirnov_cdf",
-        "kolmogorov_smirnov_test", "kruskal_wallis_test",
-        "krylov", "kurtosis", "laplace_cdf", "laplace_inv",
-        "laplace_pdf", "laplace_rnd", "legend", "legendre",
-        "license", "line", "linkprop", "list_primes",
-        "loadaudio", "loadobj", "logistic_cdf",
-        "logistic_inv", "logistic_pdf", "logistic_rnd",
-        "logit", "loglog", "loglogerr", "logm", "logncdf",
-        "logninv", "lognpdf", "lognrnd", "logspace",
-        "lookfor", "ls_command", "lsqnonneg", "magic",
-        "mahalanobis", "manova", "matlabroot",
-        "mcnemar_test", "mean", "meansq", "median", "menu",
-        "mesh", "meshc", "meshgrid", "meshz", "mexext",
-        "mget", "mkpp", "mode", "moment", "movefile",
-        "mpoles", "mput", "namelengthmax", "nargchk",
-        "nargoutchk", "nbincdf", "nbininv", "nbinpdf",
-        "nbinrnd", "nchoosek", "ndgrid", "newplot", "news",
-        "nonzeros", "normcdf", "normest", "norminv",
-        "normpdf", "normrnd", "now", "nthroot", "null",
-        "ocean", "ols", "onenormest", "optimget",
-        "optimset", "orderfields", "orient", "orth",
-        "pack", "pareto", "parseparams", "pascal", "patch",
-        "pathdef", "pcg", "pchip", "pcolor", "pcr",
-        "peaks", "periodogram", "perl", "perms", "pie",
-        "pink", "planerot", "playaudio", "plot",
-        "plotmatrix", "plotyy", "poisscdf", "poissinv",
-        "poisspdf", "poissrnd", "polar", "poly",
-        "polyaffine", "polyarea", "polyderiv", "polyfit",
-        "polygcd", "polyint", "polyout", "polyreduce",
-        "polyval", "polyvalm", "postpad", "powerset",
-        "ppder", "ppint", "ppjumps", "ppplot", "ppval",
-        "pqpnonneg", "prepad", "primes", "print",
-        "print_usage", "prism", "probit", "qp", "qqplot",
-        "quadcc", "quadgk", "quadl", "quadv", "quiver",
-        "qzhess", "rainbow", "randi", "range", "rank",
-        "ranks", "rat", "reallog", "realpow", "realsqrt",
-        "record", "rectangle_lw", "rectangle_sw",
-        "rectint", "refresh", "refreshdata",
-        "regexptranslate", "repmat", "residue", "ribbon",
-        "rindex", "roots", "rose", "rosser", "rotdim",
-        "rref", "run", "run_count", "rundemos", "run_test",
-        "runtests", "saveas", "saveaudio", "saveobj",
-        "savepath", "scatter", "secd", "semilogx",
-        "semilogxerr", "semilogy", "semilogyerr",
-        "setaudio", "setdiff", "setfield", "setxor",
-        "shading", "shift", "shiftdim", "sign_test",
-        "sinc", "sind", "sinetone", "sinewave", "skewness",
-        "slice", "sombrero", "sortrows", "spaugment",
-        "spconvert", "spdiags", "spearman", "spectral_adf",
-        "spectral_xdf", "specular", "speed", "spencer",
-        "speye", "spfun", "sphere", "spinmap", "spline",
-        "spones", "sprand", "sprandn", "sprandsym",
-        "spring", "spstats", "spy", "sqp", "stairs",
-        "statistics", "std", "stdnormal_cdf",
-        "stdnormal_inv", "stdnormal_pdf", "stdnormal_rnd",
-        "stem", "stft", "strcat", "strchr", "strjust",
-        "strmatch", "strread", "strsplit", "strtok",
-        "strtrim", "strtrunc", "structfun", "studentize",
-        "subplot", "subsindex", "subspace", "substr",
-        "substruct", "summer", "surf", "surface", "surfc",
-        "surfl", "surfnorm", "svds", "swapbytes",
-        "sylvester_matrix", "symvar", "synthesis", "table",
-        "tand", "tar", "tcdf", "tempdir", "tempname",
-        "test", "text", "textread", "textscan", "tinv",
-        "title", "toeplitz", "tpdf", "trace", "trapz",
-        "treelayout", "treeplot", "triangle_lw",
-        "triangle_sw", "tril", "trimesh", "triplequad",
-        "triplot", "trisurf", "triu", "trnd", "tsearchn",
-        "t_test", "t_test_regression", "type", "unidcdf",
-        "unidinv", "unidpdf", "unidrnd", "unifcdf",
-        "unifinv", "unifpdf", "unifrnd", "union", "unique",
-        "unix", "unmkpp", "unpack", "untabify", "untar",
-        "unwrap", "unzip", "u_test", "validatestring",
-        "vander", "var", "var_test", "vech", "ver",
-        "version", "view", "voronoi", "voronoin",
-        "waitforbuttonpress", "wavread", "wavwrite",
-        "wblcdf", "wblinv", "wblpdf", "wblrnd", "weekday",
-        "welch_test", "what", "white", "whitebg",
-        "wienrnd", "wilcoxon_test", "wilkinson", "winter",
-        "xlabel", "xlim", "ylabel", "yulewalker", "zip",
-        "zlabel", "z_test")
-
-    loadable_kw = (
-        "airy", "amd", "balance", "besselh", "besseli",
-        "besselj", "besselk", "bessely", "bitpack",
-        "bsxfun", "builtin", "ccolamd", "cellfun",
-        "cellslices", "chol", "choldelete", "cholinsert",
-        "cholinv", "cholshift", "cholupdate", "colamd",
-        "colloc", "convhulln", "convn", "csymamd",
-        "cummax", "cummin", "daspk", "daspk_options",
-        "dasrt", "dasrt_options", "dassl", "dassl_options",
-        "dbclear", "dbdown", "dbstack", "dbstatus",
-        "dbstop", "dbtype", "dbup", "dbwhere", "det",
-        "dlmread", "dmperm", "dot", "eig", "eigs",
-        "endgrent", "endpwent", "etree", "fft", "fftn",
-        "fftw", "filter", "find", "full", "gcd",
-        "getgrent", "getgrgid", "getgrnam", "getpwent",
-        "getpwnam", "getpwuid", "getrusage", "givens",
-        "gmtime", "gnuplot_binary", "hess", "ifft",
-        "ifftn", "inv", "isdebugmode", "issparse", "kron",
-        "localtime", "lookup", "lsode", "lsode_options",
-        "lu", "luinc", "luupdate", "matrix_type", "max",
-        "min", "mktime", "pinv", "qr", "qrdelete",
-        "qrinsert", "qrshift", "qrupdate", "quad",
-        "quad_options", "qz", "rand", "rande", "randg",
-        "randn", "randp", "randperm", "rcond", "regexp",
-        "regexpi", "regexprep", "schur", "setgrent",
-        "setpwent", "sort", "spalloc", "sparse", "spparms",
-        "sprank", "sqrtm", "strfind", "strftime",
-        "strptime", "strrep", "svd", "svd_driver", "syl",
-        "symamd", "symbfact", "symrcm", "time", "tsearch",
-        "typecast", "urlread", "urlwrite")
-
-    mapping_kw = (
-        "abs", "acos", "acosh", "acot", "acoth", "acsc",
-        "acsch", "angle", "arg", "asec", "asech", "asin",
-        "asinh", "atan", "atanh", "beta", "betainc",
-        "betaln", "bincoeff", "cbrt", "ceil", "conj", "cos",
-        "cosh", "cot", "coth", "csc", "csch", "erf", "erfc",
-        "erfcx", "erfinv", "exp", "finite", "fix", "floor",
-        "fmod", "gamma", "gammainc", "gammaln", "imag",
-        "isalnum", "isalpha", "isascii", "iscntrl",
-        "isdigit", "isfinite", "isgraph", "isinf",
-        "islower", "isna", "isnan", "isprint", "ispunct",
-        "isspace", "isupper", "isxdigit", "lcm", "lgamma",
-        "log", "lower", "mod", "real", "rem", "round",
-        "roundb", "sec", "sech", "sign", "sin", "sinh",
-        "sqrt", "tan", "tanh", "toascii", "tolower", "xor")
-
-    builtin_consts = (
-        "EDITOR", "EXEC_PATH", "I", "IMAGE_PATH", "NA",
-        "OCTAVE_HOME", "OCTAVE_VERSION", "PAGER",
-        "PAGER_FLAGS", "SEEK_CUR", "SEEK_END", "SEEK_SET",
-        "SIG", "S_ISBLK", "S_ISCHR", "S_ISDIR", "S_ISFIFO",
-        "S_ISLNK", "S_ISREG", "S_ISSOCK", "WCONTINUE",
-        "WCOREDUMP", "WEXITSTATUS", "WIFCONTINUED",
-        "WIFEXITED", "WIFSIGNALED", "WIFSTOPPED", "WNOHANG",
-        "WSTOPSIG", "WTERMSIG", "WUNTRACED")
-
-    tokens = {
-        'root': [
-            (r'%\{\s*\n', Comment.Multiline, 'percentblockcomment'),
-            (r'#\{\s*\n', Comment.Multiline, 'hashblockcomment'),
-            (r'[%#].*$', Comment),
-            (r'^\s*function\b', Keyword, 'deffunc'),
-
-            # from 'iskeyword' on hg changeset 8cc154f45e37
-            (words((
-                '__FILE__', '__LINE__', 'break', 'case', 'catch', 'classdef',
-                'continue', 'do', 'else', 'elseif', 'end', 'end_try_catch',
-                'end_unwind_protect', 'endclassdef', 'endevents', 'endfor',
-                'endfunction', 'endif', 'endmethods', 'endproperties', 'endswitch',
-                'endwhile', 'events', 'for', 'function', 'get', 'global', 'if',
-                'methods', 'otherwise', 'persistent', 'properties', 'return',
-                'set', 'static', 'switch', 'try', 'until', 'unwind_protect',
-                'unwind_protect_cleanup', 'while'), suffix=r'\b'),
-             Keyword),
-
-            (words(builtin_kw + command_kw + function_kw + loadable_kw + mapping_kw,
-                   suffix=r'\b'),  Name.Builtin),
-
-            (words(builtin_consts, suffix=r'\b'), Name.Constant),
-
-            # operators in Octave but not Matlab:
-            (r'-=|!=|!|/=|--', Operator),
-            # operators:
-            (r'-|==|~=|<|>|<=|>=|&&|&|~|\|\|?', Operator),
-            # operators in Octave but not Matlab requiring escape for re:
-            (r'\*=|\+=|\^=|\/=|\\=|\*\*|\+\+|\.\*\*', Operator),
-            # operators requiring escape for re:
-            (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
-
-
-            # punctuation:
-            (r'[\[\](){}:@.,]', Punctuation),
-            (r'=|:|;', Punctuation),
-
-            (r'"[^"]*"', String),
-
-            (r'(\d+\.\d*|\d*\.\d+)([eEf][+-]?[0-9]+)?', Number.Float),
-            (r'\d+[eEf][+-]?[0-9]+', Number.Float),
-            (r'\d+', Number.Integer),
-
-            # quote can be transpose, instead of string:
-            # (not great, but handles common cases...)
-            (r'(?<=[\w)\].])\'+', Operator),
-            (r'(?|<=|>=|&&|&|~|\|\|?', Operator),
-            # operators requiring escape for re:
-            (r'\.\*|\*|\+|\.\^|\.\\|\.\/|\/|\\', Operator),
-
-            # punctuation:
-            (r'[\[\](){}@.,=:;]+', Punctuation),
-
-            (r'"[^"]*"', String),
-
-            # quote can be transpose, instead of string:
-            # (not great, but handles common cases...)
-            (r'(?<=[\w)\].])\'+', Operator),
-            (r'(?', r'<', r'|', r'!', r"'")
-
-    operator_words = ('and', 'or', 'not')
-
-    tokens = {
-        'root': [
-            (r'/\*', Comment.Multiline, 'comment'),
-            (r'"(?:[^"\\]|\\.)*"', String),
-            (r'\(|\)|\[|\]|\{|\}', Punctuation),
-            (r'[,;$]', Punctuation),
-            (words (constants), Name.Constant),
-            (words (keywords), Keyword),
-            (words (operators), Operator),
-            (words (operator_words), Operator.Word),
-            (r'''(?x)
-              ((?:[a-zA-Z_#][\w#]*|`[^`]*`)
-              (?:::[a-zA-Z_#][\w#]*|`[^`]*`)*)(\s*)([(])''',
-             bygroups(Name.Function, Text.Whitespace, Punctuation)),
-            (r'''(?x)
-              (?:[a-zA-Z_#%][\w#%]*|`[^`]*`)
-              (?:::[a-zA-Z_#%][\w#%]*|`[^`]*`)*''', Name.Variable),
-            (r'[-+]?(\d*\.\d+([bdefls][-+]?\d+)?|\d+(\.\d*)?[bdefls][-+]?\d+)', Number.Float),
-            (r'[-+]?\d+', Number.Integer),
-            (r'\s+', Text.Whitespace),
-            (r'.', Text)
-        ],
-        'comment': [
-            (r'[^*/]+', Comment.Multiline),
-            (r'/\*', Comment.Multiline, '#push'),
-            (r'\*/', Comment.Multiline, '#pop'),
-            (r'[*/]', Comment.Multiline)
-        ]
-    }
-
-    def analyse_text (text):
-        strength = 0.0
-        # Input expression terminator.
-        if re.search (r'\$\s*$', text, re.MULTILINE):
-            strength += 0.05
-        # Function definition operator.
-        if ':=' in text:
-            strength += 0.02
-        return strength
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/meson.py b/venv/lib/python3.11/site-packages/pygments/lexers/meson.py
deleted file mode 100644
index f74f719..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/meson.py
+++ /dev/null
@@ -1,140 +0,0 @@
-"""
-    pygments.lexers.meson
-    ~~~~~~~~~~~~~~~~~~~~~
-
-    Pygments lexer for the Meson build system
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, include
-from pygments.token import Comment, Name, Number, Punctuation, Operator, \
-    Keyword, String, Whitespace
-
-__all__ = ['MesonLexer']
-
-
-class MesonLexer(RegexLexer):
-    """Meson language lexer.
-
-    The grammar definition use to transcribe the syntax was retrieved from
-    https://mesonbuild.com/Syntax.html#grammar for version 0.58.
-    Some of those definitions are improperly transcribed, so the Meson++
-    implementation was also checked: https://github.com/dcbaker/meson-plus-plus.
-
-    .. versionadded:: 2.10
-    """
-
-    # TODO String interpolation @VARNAME@ inner matches
-    # TODO keyword_arg: value inner matches
-
-    name = 'Meson'
-    url = 'https://mesonbuild.com/'
-    aliases = ['meson', 'meson.build']
-    filenames = ['meson.build', 'meson_options.txt']
-    mimetypes = ['text/x-meson']
-
-    tokens = {
-        'root': [
-            (r'#.*?$', Comment),
-            (r"'''.*'''", String.Single),
-            (r'[1-9][0-9]*', Number.Integer),
-            (r'0o[0-7]+', Number.Oct),
-            (r'0x[a-fA-F0-9]+', Number.Hex),
-            include('string'),
-            include('keywords'),
-            include('expr'),
-            (r'[a-zA-Z_][a-zA-Z_0-9]*', Name),
-            (r'\s+', Whitespace),
-        ],
-        'string': [
-            (r"[']{3}([']{0,2}([^\\']|\\(.|\n)))*[']{3}", String),
-            (r"'.*?(?`_.
-
-    .. versionadded:: 2.12.0
-    """
-
-    name = "MCFunction"
-    url = "https://minecraft.wiki/w/Commands"
-    aliases = ["mcfunction", "mcf"]
-    filenames = ["*.mcfunction"]
-    mimetypes = ["text/mcfunction"]
-
-    # Used to denotate the start of a block comment, borrowed from Github's mcfunction
-    _block_comment_prefix = "[>!]"
-
-    tokens = {
-        "root": [
-            include("names"),
-            include("comments"),
-            include("literals"),
-            include("whitespace"),
-            include("property"),
-            include("operators"),
-            include("selectors"),
-        ],
-
-        "names": [
-            # The start of a command (either beginning of line OR after the run keyword)
-            #  We don't encode a list of keywords since mods, plugins, or even pre-processors
-            #  may add new commands, so we have a 'close-enough' regex which catches them.
-            (r"^(\s*)([a-z_]+)", bygroups(Whitespace, Name.Builtin)),
-            (r"(?<=run)\s+[a-z_]+", Name.Builtin),
-
-            # UUID
-            (r"\b[0-9a-fA-F]+(?:-[0-9a-fA-F]+){4}\b", Name.Variable),
-            include("resource-name"),
-            # normal command names and scoreboards
-            #  there's no way to know the differences unfortuntely
-            (r"[A-Za-z_][\w.#%$]+", Keyword.Constant),
-            (r"[#%$][\w.#%$]+", Name.Variable.Magic),
-        ],
-
-        "resource-name": [
-            # resource names have to be lowercase
-            (r"#?[a-z_][a-z_.-]*:[a-z0-9_./-]+", Name.Function),
-            # similar to above except optional `:``
-            #  a `/` must be present "somewhere"
-            (r"#?[a-z0-9_\.\-]+\/[a-z0-9_\.\-\/]+", Name.Function),
-        ],
-
-        "whitespace": [
-            (r"\s+", Whitespace),
-        ],
-
-        "comments": [
-            (rf"^\s*(#{_block_comment_prefix})", Comment.Multiline,
-             ("comments.block", "comments.block.emphasized")),
-            (r"#.*$", Comment.Single),
-        ],
-        "comments.block": [
-            (rf"^\s*#{_block_comment_prefix}", Comment.Multiline,
-             "comments.block.emphasized"),
-            (r"^\s*#", Comment.Multiline, "comments.block.normal"),
-            default("#pop"),
-        ],
-        "comments.block.normal": [
-            include("comments.block.special"),
-            (r"\S+", Comment.Multiline),
-            (r"\n", Text, "#pop"),
-            include("whitespace"),
-        ],
-        "comments.block.emphasized": [
-            include("comments.block.special"),
-            (r"\S+", String.Doc),
-            (r"\n", Text, "#pop"),
-            include("whitespace"),
-        ],
-        "comments.block.special": [
-            # Params
-            (r"@\S+", Name.Decorator),
-
-            include("resource-name"),
-
-            # Scoreboard player names
-            (r"[#%$][\w.#%$]+", Name.Variable.Magic),
-        ],
-
-        "operators": [
-            (r"[\-~%^?!+*<>\\/|&=.]", Operator),
-        ],
-
-        "literals": [
-            (r"\.\.", Literal),
-            (r"(true|false)", Keyword.Pseudo),
-
-            # these are like unquoted strings and appear in many places
-            (r"[A-Za-z_]+", Name.Variable.Class),
-
-            (r"[0-7]b", Number.Byte),
-            (r"[+-]?\d*\.?\d+([eE]?[+-]?\d+)?[df]?\b", Number.Float),
-            (r"[+-]?\d+\b", Number.Integer),
-            (r'"', String.Double, "literals.string-double"),
-            (r"'", String.Single, "literals.string-single"),
-        ],
-        "literals.string-double": [
-            (r"\\.", String.Escape),
-            (r'[^\\"\n]+', String.Double),
-            (r'"', String.Double, "#pop"),
-        ],
-        "literals.string-single": [
-            (r"\\.", String.Escape),
-            (r"[^\\'\n]+", String.Single),
-            (r"'", String.Single, "#pop"),
-        ],
-
-        "selectors": [
-            (r"@[a-z]", Name.Variable),
-        ],
- 
-
-        ## Generic Property Container
-        # There are several, differing instances where the language accepts
-        #  specific contained keys or contained key, value pairings.
-        # 
-        # Property Maps:
-        # - Starts with either `[` or `{`
-        # - Key separated by `:` or `=`
-        # - Deliminated by `,`
-        # 
-        # Property Lists:
-        # - Starts with `[`
-        # - Deliminated by `,`
-        # 
-        # For simplicity, these patterns match a generic, nestable structure
-        #  which follow a key, value pattern. For normal lists, there's only keys.
-        # This allow some "illegal" structures, but we'll accept those for
-        #  sake of simplicity
-        # 
-        # Examples:
-        # - `[facing=up, powered=true]` (blockstate)
-        # - `[name="hello world", nbt={key: 1b}]` (selector + nbt)
-        # - `[{"text": "value"}, "literal"]` (json)
-        ##
-        "property": [
-            # This state gets included in root and also several substates
-            # We do this to shortcut the starting of new properties
-            #  within other properties. Lists can have sublists and compounds
-            #  and values can start a new property (see the `difficult_1.txt`
-            #  snippet).
-            (r"\{", Punctuation, ("property.curly", "property.key")),
-            (r"\[", Punctuation, ("property.square", "property.key")),
-        ],
-        "property.curly": [
-            include("whitespace"),
-            include("property"),
-            (r"\}", Punctuation, "#pop"),
-        ],
-        "property.square": [
-            include("whitespace"),
-            include("property"),
-            (r"\]", Punctuation, "#pop"),
-
-            # lists can have sequences of items
-            (r",", Punctuation),
-        ],
-        "property.key": [
-            include("whitespace"),
-
-            # resource names (for advancements)
-            #  can omit `:` to default `minecraft:`
-            # must check if there is a future equals sign if `:` is in the name
-            (r"#?[a-z_][a-z_\.\-]*\:[a-z0-9_\.\-/]+(?=\s*\=)", Name.Attribute, "property.delimiter"),
-            (r"#?[a-z_][a-z0-9_\.\-/]+", Name.Attribute, "property.delimiter"),
-
-            # unquoted NBT key
-            (r"[A-Za-z_\-\+]+", Name.Attribute, "property.delimiter"),
-
-            # quoted JSON or NBT key
-            (r'"', Name.Attribute, "property.delimiter", "literals.string-double"),
-            (r"'", Name.Attribute, "property.delimiter", "literals.string-single"),
-
-            # index for a list
-            (r"-?\d+", Number.Integer, "property.delimiter"),
-
-            default("#pop"),
-        ],
-        "property.key.string-double": [
-            (r"\\.", String.Escape),
-            (r'[^\\"\n]+', Name.Attribute),
-            (r'"', Name.Attribute, "#pop"),
-        ],
-        "property.key.string-single": [
-            (r"\\.", String.Escape),
-            (r"[^\\'\n]+", Name.Attribute),
-            (r"'", Name.Attribute, "#pop"),
-        ],
-        "property.delimiter": [
-            include("whitespace"),
-            
-            (r"[:=]!?", Punctuation, "property.value"),
-            (r",", Punctuation),
-
-            default("#pop"),
-        ],
-        "property.value": [
-            include("whitespace"),
-
-            # unquoted resource names are valid literals here
-            (r"#?[a-z_][a-z_\.\-]*\:[a-z0-9_\.\-/]+", Name.Tag),
-            (r"#?[a-z_][a-z0-9_\.\-/]+", Name.Tag),
-
-            include("literals"),
-            include("property"),
-
-            default("#pop"),
-        ],
-    }
-
-
-class MCSchemaLexer(RegexLexer):
-    """Lexer for Minecraft Add-ons data Schemas, an interface structure standard used in Minecraft
-
-    .. versionadded:: 2.14.0
-    """
-    
-    name = 'MCSchema'
-    url = 'https://learn.microsoft.com/en-us/minecraft/creator/reference/content/schemasreference/'
-    aliases = ['mcschema']
-    filenames = ['*.mcschema']
-    mimetypes = ['text/mcschema']
-
-    tokens = {
-        'commentsandwhitespace': [
-            (r'\s+', Whitespace),
-            (r'//.*?$', Comment.Single),
-            (r'/\*.*?\*/', Comment.Multiline)
-        ],
-        'slashstartsregex': [
-            include('commentsandwhitespace'),
-            (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
-             r'([gimuysd]+\b|\B)', String.Regex, '#pop'),
-            (r'(?=/)', Text, ('#pop', 'badregex')),
-            default('#pop')
-        ],
-        'badregex': [
-            (r'\n', Whitespace, '#pop')
-        ],
-        'singlestring': [
-            (r'\\.', String.Escape),
-            (r"'", String.Single, '#pop'),
-            (r"[^\\']+", String.Single),
-        ],
-        'doublestring': [
-            (r'\\.', String.Escape),
-            (r'"', String.Double, '#pop'),
-            (r'[^\\"]+', String.Double),
-        ],
-        'root': [
-            (r'^(?=\s|/|', Comment, '#pop'),
-            (r'[^\-]+|-', Comment),
-        ],
-    }
-
-
-class ReasonLexer(RegexLexer):
-    """
-    For the ReasonML language.
-
-    .. versionadded:: 2.6
-    """
-
-    name = 'ReasonML'
-    url = 'https://reasonml.github.io/'
-    aliases = ['reasonml', 'reason']
-    filenames = ['*.re', '*.rei']
-    mimetypes = ['text/x-reasonml']
-
-    keywords = (
-        'as', 'assert', 'begin', 'class', 'constraint', 'do', 'done', 'downto',
-        'else', 'end', 'exception', 'external', 'false', 'for', 'fun', 'esfun',
-        'function', 'functor', 'if', 'in', 'include', 'inherit', 'initializer', 'lazy',
-        'let', 'switch', 'module', 'pub', 'mutable', 'new', 'nonrec', 'object', 'of',
-        'open', 'pri', 'rec', 'sig', 'struct', 'then', 'to', 'true', 'try',
-        'type', 'val', 'virtual', 'when', 'while', 'with',
-    )
-    keyopts = (
-        '!=', '#', '&', '&&', r'\(', r'\)', r'\*', r'\+', ',', '-',
-        r'-\.', '=>', r'\.', r'\.\.', r'\.\.\.', ':', '::', ':=', ':>', ';', ';;', '<',
-        '<-', '=', '>', '>]', r'>\}', r'\?', r'\?\?', r'\[', r'\[<', r'\[>',
-        r'\[\|', ']', '_', '`', r'\{', r'\{<', r'\|', r'\|\|', r'\|]', r'\}', '~'
-    )
-
-    operators = r'[!$%&*+\./:<=>?@^|~-]'
-    word_operators = ('and', 'asr', 'land', 'lor', 'lsl', 'lsr', 'lxor', 'mod', 'or')
-    prefix_syms = r'[!?~]'
-    infix_syms = r'[=<>@^|&+\*/$%-]'
-    primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array')
-
-    tokens = {
-        'escape-sequence': [
-            (r'\\[\\"\'ntbr]', String.Escape),
-            (r'\\[0-9]{3}', String.Escape),
-            (r'\\x[0-9a-fA-F]{2}', String.Escape),
-        ],
-        'root': [
-            (r'\s+', Text),
-            (r'false|true|\(\)|\[\]', Name.Builtin.Pseudo),
-            (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
-            (r'\b([A-Z][\w\']*)', Name.Class),
-            (r'//.*?\n', Comment.Single),
-            (r'\/\*(?!/)', Comment.Multiline, 'comment'),
-            (r'\b(%s)\b' % '|'.join(keywords), Keyword),
-            (r'(%s)' % '|'.join(keyopts[::-1]), Operator.Word),
-            (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
-            (r'\b(%s)\b' % '|'.join(word_operators), Operator.Word),
-            (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
-
-            (r"[^\W\d][\w']*", Name),
-
-            (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
-            (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
-            (r'0[oO][0-7][0-7_]*', Number.Oct),
-            (r'0[bB][01][01_]*', Number.Bin),
-            (r'\d[\d_]*', Number.Integer),
-
-            (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
-             String.Char),
-            (r"'.'", String.Char),
-            (r"'", Keyword),
-
-            (r'"', String.Double, 'string'),
-
-            (r'[~?][a-z][\w\']*:', Name.Variable),
-        ],
-        'comment': [
-            (r'[^/*]+', Comment.Multiline),
-            (r'\/\*', Comment.Multiline, '#push'),
-            (r'\*\/', Comment.Multiline, '#pop'),
-            (r'\*', Comment.Multiline),
-        ],
-        'string': [
-            (r'[^\\"]+', String.Double),
-            include('escape-sequence'),
-            (r'\\\n', String.Double),
-            (r'"', String.Double, '#pop'),
-        ],
-        'dotted': [
-            (r'\s+', Text),
-            (r'\.', Punctuation),
-            (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
-            (r'[A-Z][\w\']*', Name.Class, '#pop'),
-            (r'[a-z_][\w\']*', Name, '#pop'),
-            default('#pop'),
-        ],
-    }
-
-
-class FStarLexer(RegexLexer):
-    """
-    For the F* language.
-    .. versionadded:: 2.7
-    """
-
-    name = 'FStar'
-    url = 'https://www.fstar-lang.org/'
-    aliases = ['fstar']
-    filenames = ['*.fst', '*.fsti']
-    mimetypes = ['text/x-fstar']
-
-    keywords = (
-        'abstract', 'attributes', 'noeq', 'unopteq', 'and'
-        'begin', 'by', 'default', 'effect', 'else', 'end', 'ensures',
-        'exception', 'exists', 'false', 'forall', 'fun', 'function', 'if',
-        'in', 'include', 'inline', 'inline_for_extraction', 'irreducible',
-        'logic', 'match', 'module', 'mutable', 'new', 'new_effect', 'noextract',
-        'of', 'open', 'opaque', 'private', 'range_of', 'reifiable',
-        'reify', 'reflectable', 'requires', 'set_range_of', 'sub_effect',
-        'synth', 'then', 'total', 'true', 'try', 'type', 'unfold', 'unfoldable',
-        'val', 'when', 'with', 'not'
-    )
-    decl_keywords = ('let', 'rec')
-    assume_keywords = ('assume', 'admit', 'assert', 'calc')
-    keyopts = (
-        r'~', r'-', r'/\\', r'\\/', r'<:', r'<@', r'\(\|', r'\|\)', r'#', r'u#',
-        r'&', r'\(', r'\)', r'\(\)', r',', r'~>', r'->', r'<-', r'<--', r'<==>',
-        r'==>', r'\.', r'\?', r'\?\.', r'\.\[', r'\.\(', r'\.\(\|', r'\.\[\|',
-        r'\{:pattern', r':', r'::', r':=', r';', r';;', r'=', r'%\[', r'!\{',
-        r'\[', r'\[@', r'\[\|', r'\|>', r'\]', r'\|\]', r'\{', r'\|', r'\}', r'\$'
-    )
-
-    operators = r'[!$%&*+\./:<=>?@^|~-]'
-    prefix_syms = r'[!?~]'
-    infix_syms = r'[=<>@^|&+\*/$%-]'
-    primitives = ('unit', 'int', 'float', 'bool', 'string', 'char', 'list', 'array')
-
-    tokens = {
-        'escape-sequence': [
-            (r'\\[\\"\'ntbr]', String.Escape),
-            (r'\\[0-9]{3}', String.Escape),
-            (r'\\x[0-9a-fA-F]{2}', String.Escape),
-        ],
-        'root': [
-            (r'\s+', Text),
-            (r'false|true|False|True|\(\)|\[\]', Name.Builtin.Pseudo),
-            (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Namespace, 'dotted'),
-            (r'\b([A-Z][\w\']*)', Name.Class),
-            (r'\(\*(?![)])', Comment, 'comment'),
-            (r'\/\/.+$', Comment),
-            (r'\b(%s)\b' % '|'.join(keywords), Keyword),
-            (r'\b(%s)\b' % '|'.join(assume_keywords), Name.Exception),
-            (r'\b(%s)\b' % '|'.join(decl_keywords), Keyword.Declaration),
-            (r'(%s)' % '|'.join(keyopts[::-1]), Operator),
-            (r'(%s|%s)?%s' % (infix_syms, prefix_syms, operators), Operator),
-            (r'\b(%s)\b' % '|'.join(primitives), Keyword.Type),
-
-            (r"[^\W\d][\w']*", Name),
-
-            (r'-?\d[\d_]*(.[\d_]*)?([eE][+\-]?\d[\d_]*)', Number.Float),
-            (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
-            (r'0[oO][0-7][0-7_]*', Number.Oct),
-            (r'0[bB][01][01_]*', Number.Bin),
-            (r'\d[\d_]*', Number.Integer),
-
-            (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
-             String.Char),
-            (r"'.'", String.Char),
-            (r"'", Keyword),  # a stray quote is another syntax element
-            (r"\`([\w\'.]+)\`", Operator.Word),  # for infix applications
-            (r"\`", Keyword),  # for quoting
-            (r'"', String.Double, 'string'),
-
-            (r'[~?][a-z][\w\']*:', Name.Variable),
-        ],
-        'comment': [
-            (r'[^(*)]+', Comment),
-            (r'\(\*', Comment, '#push'),
-            (r'\*\)', Comment, '#pop'),
-            (r'[(*)]', Comment),
-        ],
-        'string': [
-            (r'[^\\"]+', String.Double),
-            include('escape-sequence'),
-            (r'\\\n', String.Double),
-            (r'"', String.Double, '#pop'),
-        ],
-        'dotted': [
-            (r'\s+', Text),
-            (r'\.', Punctuation),
-            (r'[A-Z][\w\']*(?=\s*\.)', Name.Namespace),
-            (r'[A-Z][\w\']*', Name.Class, '#pop'),
-            (r'[a-z_][\w\']*', Name, '#pop'),
-            default('#pop'),
-        ],
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/modeling.py b/venv/lib/python3.11/site-packages/pygments/lexers/modeling.py
deleted file mode 100644
index e247680..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/modeling.py
+++ /dev/null
@@ -1,369 +0,0 @@
-"""
-    pygments.lexers.modeling
-    ~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for modeling languages.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation, Whitespace
-
-from pygments.lexers.html import HtmlLexer
-from pygments.lexers import _stan_builtins
-
-__all__ = ['ModelicaLexer', 'BugsLexer', 'JagsLexer', 'StanLexer']
-
-
-class ModelicaLexer(RegexLexer):
-    """
-    For Modelica source code.
-
-    .. versionadded:: 1.1
-    """
-    name = 'Modelica'
-    url = 'http://www.modelica.org/'
-    aliases = ['modelica']
-    filenames = ['*.mo']
-    mimetypes = ['text/x-modelica']
-
-    flags = re.DOTALL | re.MULTILINE
-
-    _name = r"(?:'(?:[^\\']|\\.)+'|[a-zA-Z_]\w*)"
-
-    tokens = {
-        'whitespace': [
-            (r'[\s\ufeff]+', Text),
-            (r'//[^\n]*\n?', Comment.Single),
-            (r'/\*.*?\*/', Comment.Multiline)
-        ],
-        'root': [
-            include('whitespace'),
-            (r'"', String.Double, 'string'),
-            (r'[()\[\]{},;]+', Punctuation),
-            (r'\.?[*^/+-]|\.|<>|[<>:=]=?', Operator),
-            (r'\d+(\.?\d*[eE][-+]?\d+|\.\d*)', Number.Float),
-            (r'\d+', Number.Integer),
-            (r'(abs|acos|actualStream|array|asin|assert|AssertionLevel|atan|'
-             r'atan2|backSample|Boolean|cardinality|cat|ceil|change|Clock|'
-             r'Connections|cos|cosh|cross|delay|diagonal|div|edge|exp|'
-             r'ExternalObject|fill|floor|getInstanceName|hold|homotopy|'
-             r'identity|inStream|integer|Integer|interval|inverse|isPresent|'
-             r'linspace|log|log10|matrix|max|min|mod|ndims|noClock|noEvent|'
-             r'ones|outerProduct|pre|previous|product|Real|reinit|rem|rooted|'
-             r'sample|scalar|semiLinear|shiftSample|sign|sin|sinh|size|skew|'
-             r'smooth|spatialDistribution|sqrt|StateSelect|String|subSample|'
-             r'sum|superSample|symmetric|tan|tanh|terminal|terminate|time|'
-             r'transpose|vector|zeros)\b', Name.Builtin),
-            (r'(algorithm|annotation|break|connect|constant|constrainedby|der|'
-             r'discrete|each|else|elseif|elsewhen|encapsulated|enumeration|'
-             r'equation|exit|expandable|extends|external|firstTick|final|flow|for|if|'
-             r'import|impure|in|initial|inner|input|interval|loop|nondiscrete|outer|'
-             r'output|parameter|partial|protected|public|pure|redeclare|'
-             r'replaceable|return|stream|then|when|while)\b',
-             Keyword.Reserved),
-            (r'(and|not|or)\b', Operator.Word),
-            (r'(block|class|connector|end|function|model|operator|package|'
-             r'record|type)\b', Keyword.Reserved, 'class'),
-            (r'(false|true)\b', Keyword.Constant),
-            (r'within\b', Keyword.Reserved, 'package-prefix'),
-            (_name, Name)
-        ],
-        'class': [
-            include('whitespace'),
-            (r'(function|record)\b', Keyword.Reserved),
-            (r'(if|for|when|while)\b', Keyword.Reserved, '#pop'),
-            (_name, Name.Class, '#pop'),
-            default('#pop')
-        ],
-        'package-prefix': [
-            include('whitespace'),
-            (_name, Name.Namespace, '#pop'),
-            default('#pop')
-        ],
-        'string': [
-            (r'"', String.Double, '#pop'),
-            (r'\\[\'"?\\abfnrtv]', String.Escape),
-            (r'(?i)<\s*html\s*>([^\\"]|\\.)+?(<\s*/\s*html\s*>|(?="))',
-             using(HtmlLexer)),
-            (r'<|\\?[^"\\<]+', String.Double)
-        ]
-    }
-
-
-class BugsLexer(RegexLexer):
-    """
-    Pygments Lexer for OpenBugs and WinBugs
-    models.
-
-    .. versionadded:: 1.6
-    """
-
-    name = 'BUGS'
-    aliases = ['bugs', 'winbugs', 'openbugs']
-    filenames = ['*.bug']
-
-    _FUNCTIONS = (
-        # Scalar functions
-        'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
-        'cloglog', 'cos', 'cosh', 'cumulative', 'cut', 'density', 'deviance',
-        'equals', 'expr', 'gammap', 'ilogit', 'icloglog', 'integral', 'log',
-        'logfact', 'loggam', 'logit', 'max', 'min', 'phi', 'post.p.value',
-        'pow', 'prior.p.value', 'probit', 'replicate.post', 'replicate.prior',
-        'round', 'sin', 'sinh', 'solution', 'sqrt', 'step', 'tan', 'tanh',
-        'trunc',
-        # Vector functions
-        'inprod', 'interp.lin', 'inverse', 'logdet', 'mean', 'eigen.vals',
-        'ode', 'prod', 'p.valueM', 'rank', 'ranked', 'replicate.postM',
-        'sd', 'sort', 'sum',
-        # Special
-        'D', 'I', 'F', 'T', 'C')
-    """ OpenBUGS built-in functions
-
-    From http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAII
-
-    This also includes
-
-    - T, C, I : Truncation and censoring.
-      ``T`` and ``C`` are in OpenBUGS. ``I`` in WinBUGS.
-    - D : ODE
-    - F : Functional http://www.openbugs.info/Examples/Functionals.html
-
-    """
-
-    _DISTRIBUTIONS = ('dbern', 'dbin', 'dcat', 'dnegbin', 'dpois',
-                      'dhyper', 'dbeta', 'dchisqr', 'ddexp', 'dexp',
-                      'dflat', 'dgamma', 'dgev', 'df', 'dggamma', 'dgpar',
-                      'dloglik', 'dlnorm', 'dlogis', 'dnorm', 'dpar',
-                      'dt', 'dunif', 'dweib', 'dmulti', 'ddirch', 'dmnorm',
-                      'dmt', 'dwish')
-    """ OpenBUGS built-in distributions
-
-    Functions from
-    http://www.openbugs.info/Manuals/ModelSpecification.html#ContentsAI
-    """
-
-    tokens = {
-        'whitespace': [
-            (r"\s+", Text),
-        ],
-        'comments': [
-            # Comments
-            (r'#.*$', Comment.Single),
-        ],
-        'root': [
-            # Comments
-            include('comments'),
-            include('whitespace'),
-            # Block start
-            (r'(model)(\s+)(\{)',
-             bygroups(Keyword.Namespace, Text, Punctuation)),
-            # Reserved Words
-            (r'(for|in)(?![\w.])', Keyword.Reserved),
-            # Built-in Functions
-            (r'(%s)(?=\s*\()'
-             % r'|'.join(_FUNCTIONS + _DISTRIBUTIONS),
-             Name.Builtin),
-            # Regular variable names
-            (r'[A-Za-z][\w.]*', Name),
-            # Number Literals
-            (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
-            # Punctuation
-            (r'\[|\]|\(|\)|:|,|;', Punctuation),
-            # Assignment operators
-            # SLexer makes these tokens Operators.
-            (r'<-|~', Operator),
-            # Infix and prefix operators
-            (r'\+|-|\*|/', Operator),
-            # Block
-            (r'[{}]', Punctuation),
-        ]
-    }
-
-    def analyse_text(text):
-        if re.search(r"^\s*model\s*{", text, re.M):
-            return 0.7
-        else:
-            return 0.0
-
-
-class JagsLexer(RegexLexer):
-    """
-    Pygments Lexer for JAGS.
-
-    .. versionadded:: 1.6
-    """
-
-    name = 'JAGS'
-    aliases = ['jags']
-    filenames = ['*.jag', '*.bug']
-
-    # JAGS
-    _FUNCTIONS = (
-        'abs', 'arccos', 'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctanh',
-        'cos', 'cosh', 'cloglog',
-        'equals', 'exp', 'icloglog', 'ifelse', 'ilogit', 'log', 'logfact',
-        'loggam', 'logit', 'phi', 'pow', 'probit', 'round', 'sin', 'sinh',
-        'sqrt', 'step', 'tan', 'tanh', 'trunc', 'inprod', 'interp.lin',
-        'logdet', 'max', 'mean', 'min', 'prod', 'sum', 'sd', 'inverse',
-        'rank', 'sort', 't', 'acos', 'acosh', 'asin', 'asinh', 'atan',
-        # Truncation/Censoring (should I include)
-        'T', 'I')
-    # Distributions with density, probability and quartile functions
-    _DISTRIBUTIONS = tuple('[dpq]%s' % x for x in
-                           ('bern', 'beta', 'dchiqsqr', 'ddexp', 'dexp',
-                            'df', 'gamma', 'gen.gamma', 'logis', 'lnorm',
-                            'negbin', 'nchisqr', 'norm', 'par', 'pois', 'weib'))
-    # Other distributions without density and probability
-    _OTHER_DISTRIBUTIONS = (
-        'dt', 'dunif', 'dbetabin', 'dbern', 'dbin', 'dcat', 'dhyper',
-        'ddirch', 'dmnorm', 'dwish', 'dmt', 'dmulti', 'dbinom', 'dchisq',
-        'dnbinom', 'dweibull', 'ddirich')
-
-    tokens = {
-        'whitespace': [
-            (r"\s+", Text),
-        ],
-        'names': [
-            # Regular variable names
-            (r'[a-zA-Z][\w.]*\b', Name),
-        ],
-        'comments': [
-            # do not use stateful comments
-            (r'(?s)/\*.*?\*/', Comment.Multiline),
-            # Comments
-            (r'#.*$', Comment.Single),
-        ],
-        'root': [
-            # Comments
-            include('comments'),
-            include('whitespace'),
-            # Block start
-            (r'(model|data)(\s+)(\{)',
-             bygroups(Keyword.Namespace, Text, Punctuation)),
-            (r'var(?![\w.])', Keyword.Declaration),
-            # Reserved Words
-            (r'(for|in)(?![\w.])', Keyword.Reserved),
-            # Builtins
-            # Need to use lookahead because . is a valid char
-            (r'(%s)(?=\s*\()' % r'|'.join(_FUNCTIONS
-                                          + _DISTRIBUTIONS
-                                          + _OTHER_DISTRIBUTIONS),
-             Name.Builtin),
-            # Names
-            include('names'),
-            # Number Literals
-            (r'[-+]?[0-9]*\.?[0-9]+([eE][-+]?[0-9]+)?', Number),
-            (r'\[|\]|\(|\)|:|,|;', Punctuation),
-            # Assignment operators
-            (r'<-|~', Operator),
-            # # JAGS includes many more than OpenBUGS
-            (r'\+|-|\*|\/|\|\|[&]{2}|[<>=]=?|\^|%.*?%', Operator),
-            (r'[{}]', Punctuation),
-        ]
-    }
-
-    def analyse_text(text):
-        if re.search(r'^\s*model\s*\{', text, re.M):
-            if re.search(r'^\s*data\s*\{', text, re.M):
-                return 0.9
-            elif re.search(r'^\s*var', text, re.M):
-                return 0.9
-            else:
-                return 0.3
-        else:
-            return 0
-
-
-class StanLexer(RegexLexer):
-    """Pygments Lexer for Stan models.
-
-    The Stan modeling language is specified in the *Stan Modeling Language
-    User's Guide and Reference Manual, v2.17.0*,
-    `pdf `__.
-
-    .. versionadded:: 1.6
-    """
-
-    name = 'Stan'
-    aliases = ['stan']
-    filenames = ['*.stan']
-
-    tokens = {
-        'whitespace': [
-            (r"\s+", Text),
-        ],
-        'comments': [
-            (r'(?s)/\*.*?\*/', Comment.Multiline),
-            # Comments
-            (r'(//|#).*$', Comment.Single),
-        ],
-        'root': [
-            (r'"[^"]*"', String),
-            # Comments
-            include('comments'),
-            # block start
-            include('whitespace'),
-            # Block start
-            (r'(%s)(\s*)(\{)' %
-             r'|'.join(('functions', 'data', r'transformed\s+?data',
-                        'parameters', r'transformed\s+parameters',
-                        'model', r'generated\s+quantities')),
-             bygroups(Keyword.Namespace, Text, Punctuation)),
-            # target keyword
-            (r'target\s*\+=', Keyword),
-            # Reserved Words
-            (r'(%s)\b' % r'|'.join(_stan_builtins.KEYWORDS), Keyword),
-            # Truncation
-            (r'T(?=\s*\[)', Keyword),
-            # Data types
-            (r'(%s)\b' % r'|'.join(_stan_builtins.TYPES), Keyword.Type),
-             # < should be punctuation, but elsewhere I can't tell if it is in
-             # a range constraint
-            (r'(<)(\s*)(upper|lower|offset|multiplier)(\s*)(=)',
-             bygroups(Operator, Whitespace, Keyword, Whitespace, Punctuation)),
-            (r'(,)(\s*)(upper)(\s*)(=)',
-             bygroups(Punctuation, Whitespace, Keyword, Whitespace, Punctuation)),
-            # Punctuation
-            (r"[;,\[\]()]", Punctuation),
-            # Builtin
-            (r'(%s)(?=\s*\()' % '|'.join(_stan_builtins.FUNCTIONS), Name.Builtin),
-            (r'(~)(\s*)(%s)(?=\s*\()' % '|'.join(_stan_builtins.DISTRIBUTIONS),
-                bygroups(Operator, Whitespace, Name.Builtin)),
-            # Special names ending in __, like lp__
-            (r'[A-Za-z]\w*__\b', Name.Builtin.Pseudo),
-            (r'(%s)\b' % r'|'.join(_stan_builtins.RESERVED), Keyword.Reserved),
-            # user-defined functions
-            (r'[A-Za-z]\w*(?=\s*\()]', Name.Function),
-            # Imaginary Literals
-            (r'[0-9]+(\.[0-9]*)?([eE][+-]?[0-9]+)?i', Number.Float),
-            (r'\.[0-9]+([eE][+-]?[0-9]+)?i', Number.Float),
-            (r'[0-9]+i', Number.Float),
-            # Real Literals
-            (r'[0-9]+(\.[0-9]*)?([eE][+-]?[0-9]+)?', Number.Float),
-            (r'\.[0-9]+([eE][+-]?[0-9]+)?', Number.Float),
-            # Integer Literals
-            (r'[0-9]+', Number.Integer),
-            # Regular variable names
-            (r'[A-Za-z]\w*\b', Name),
-            # Assignment operators
-            (r'<-|(?:\+|-|\.?/|\.?\*|=)?=|~', Operator),
-            # Infix, prefix and postfix operators (and = )
-            (r"\+|-|\.?\*|\.?/|\\|'|\.?\^|!=?|<=?|>=?|\|\||&&|%|\?|:|%/%|!", Operator),
-            # Block delimiters
-            (r'[{}]', Punctuation),
-            # Distribution |
-            (r'\|', Punctuation)
-        ]
-    }
-
-    def analyse_text(text):
-        if re.search(r'^\s*parameters\s*\{', text, re.M):
-            return 1.0
-        else:
-            return 0.0
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/modula2.py b/venv/lib/python3.11/site-packages/pygments/lexers/modula2.py
deleted file mode 100644
index 8bd4765..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/modula2.py
+++ /dev/null
@@ -1,1580 +0,0 @@
-"""
-    pygments.lexers.modula2
-    ~~~~~~~~~~~~~~~~~~~~~~~
-
-    Multi-Dialect Lexer for Modula-2.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include
-from pygments.util import get_bool_opt, get_list_opt
-from pygments.token import Text, Comment, Operator, Keyword, Name, \
-    String, Number, Punctuation, Error
-
-__all__ = ['Modula2Lexer']
-
-
-# Multi-Dialect Modula-2 Lexer
-class Modula2Lexer(RegexLexer):
-    """
-    For Modula-2 source code.
-
-    The Modula-2 lexer supports several dialects.  By default, it operates in
-    fallback mode, recognising the *combined* literals, punctuation symbols
-    and operators of all supported dialects, and the *combined* reserved words
-    and builtins of PIM Modula-2, ISO Modula-2 and Modula-2 R10, while not
-    differentiating between library defined identifiers.
-
-    To select a specific dialect, a dialect option may be passed
-    or a dialect tag may be embedded into a source file.
-
-    Dialect Options:
-
-    `m2pim`
-        Select PIM Modula-2 dialect.
-    `m2iso`
-        Select ISO Modula-2 dialect.
-    `m2r10`
-        Select Modula-2 R10 dialect.
-    `objm2`
-        Select Objective Modula-2 dialect.
-
-    The PIM and ISO dialect options may be qualified with a language extension.
-
-    Language Extensions:
-
-    `+aglet`
-        Select Aglet Modula-2 extensions, available with m2iso.
-    `+gm2`
-        Select GNU Modula-2 extensions, available with m2pim.
-    `+p1`
-        Select p1 Modula-2 extensions, available with m2iso.
-    `+xds`
-        Select XDS Modula-2 extensions, available with m2iso.
-
-
-    Passing a Dialect Option via Unix Commandline Interface
-
-    Dialect options may be passed to the lexer using the `dialect` key.
-    Only one such option should be passed. If multiple dialect options are
-    passed, the first valid option is used, any subsequent options are ignored.
-
-    Examples:
-
-    `$ pygmentize -O full,dialect=m2iso -f html -o /path/to/output /path/to/input`
-        Use ISO dialect to render input to HTML output
-    `$ pygmentize -O full,dialect=m2iso+p1 -f rtf -o /path/to/output /path/to/input`
-        Use ISO dialect with p1 extensions to render input to RTF output
-
-
-    Embedding a Dialect Option within a source file
-
-    A dialect option may be embedded in a source file in form of a dialect
-    tag, a specially formatted comment that specifies a dialect option.
-
-    Dialect Tag EBNF::
-
-       dialectTag :
-           OpeningCommentDelim Prefix dialectOption ClosingCommentDelim ;
-
-       dialectOption :
-           'm2pim' | 'm2iso' | 'm2r10' | 'objm2' |
-           'm2iso+aglet' | 'm2pim+gm2' | 'm2iso+p1' | 'm2iso+xds' ;
-
-       Prefix : '!' ;
-
-       OpeningCommentDelim : '(*' ;
-
-       ClosingCommentDelim : '*)' ;
-
-    No whitespace is permitted between the tokens of a dialect tag.
-
-    In the event that a source file contains multiple dialect tags, the first
-    tag that contains a valid dialect option will be used and any subsequent
-    dialect tags will be ignored.  Ideally, a dialect tag should be placed
-    at the beginning of a source file.
-
-    An embedded dialect tag overrides a dialect option set via command line.
-
-    Examples:
-
-    ``(*!m2r10*) DEFINITION MODULE Foobar; ...``
-        Use Modula2 R10 dialect to render this source file.
-    ``(*!m2pim+gm2*) DEFINITION MODULE Bazbam; ...``
-        Use PIM dialect with GNU extensions to render this source file.
-
-
-    Algol Publication Mode:
-
-    In Algol publication mode, source text is rendered for publication of
-    algorithms in scientific papers and academic texts, following the format
-    of the Revised Algol-60 Language Report.  It is activated by passing
-    one of two corresponding styles as an option:
-
-    `algol`
-        render reserved words lowercase underline boldface
-        and builtins lowercase boldface italic
-    `algol_nu`
-        render reserved words lowercase boldface (no underlining)
-        and builtins lowercase boldface italic
-
-    The lexer automatically performs the required lowercase conversion when
-    this mode is activated.
-
-    Example:
-
-    ``$ pygmentize -O full,style=algol -f latex -o /path/to/output /path/to/input``
-        Render input file in Algol publication mode to LaTeX output.
-
-
-    Rendering Mode of First Class ADT Identifiers:
-
-    The rendering of standard library first class ADT identifiers is controlled
-    by option flag "treat_stdlib_adts_as_builtins".
-
-    When this option is turned on, standard library ADT identifiers are rendered
-    as builtins.  When it is turned off, they are rendered as ordinary library
-    identifiers.
-
-    `treat_stdlib_adts_as_builtins` (default: On)
-
-    The option is useful for dialects that support ADTs as first class objects
-    and provide ADTs in the standard library that would otherwise be built-in.
-
-    At present, only Modula-2 R10 supports library ADTs as first class objects
-    and therefore, no ADT identifiers are defined for any other dialects.
-
-    Example:
-
-    ``$ pygmentize -O full,dialect=m2r10,treat_stdlib_adts_as_builtins=Off ...``
-        Render standard library ADTs as ordinary library types.
-
-    .. versionadded:: 1.3
-
-    .. versionchanged:: 2.1
-       Added multi-dialect support.
-    """
-    name = 'Modula-2'
-    url = 'http://www.modula2.org/'
-    aliases = ['modula2', 'm2']
-    filenames = ['*.def', '*.mod']
-    mimetypes = ['text/x-modula2']
-
-    flags = re.MULTILINE | re.DOTALL
-
-    tokens = {
-        'whitespace': [
-            (r'\n+', Text),  # blank lines
-            (r'\s+', Text),  # whitespace
-        ],
-        'dialecttags': [
-            # PIM Dialect Tag
-            (r'\(\*!m2pim\*\)', Comment.Special),
-            # ISO Dialect Tag
-            (r'\(\*!m2iso\*\)', Comment.Special),
-            # M2R10 Dialect Tag
-            (r'\(\*!m2r10\*\)', Comment.Special),
-            # ObjM2 Dialect Tag
-            (r'\(\*!objm2\*\)', Comment.Special),
-            # Aglet Extensions Dialect Tag
-            (r'\(\*!m2iso\+aglet\*\)', Comment.Special),
-            # GNU Extensions Dialect Tag
-            (r'\(\*!m2pim\+gm2\*\)', Comment.Special),
-            # p1 Extensions Dialect Tag
-            (r'\(\*!m2iso\+p1\*\)', Comment.Special),
-            # XDS Extensions Dialect Tag
-            (r'\(\*!m2iso\+xds\*\)', Comment.Special),
-        ],
-        'identifiers': [
-            (r'([a-zA-Z_$][\w$]*)', Name),
-        ],
-        'prefixed_number_literals': [
-            #
-            # Base-2, whole number
-            (r'0b[01]+(\'[01]+)*', Number.Bin),
-            #
-            # Base-16, whole number
-            (r'0[ux][0-9A-F]+(\'[0-9A-F]+)*', Number.Hex),
-        ],
-        'plain_number_literals': [
-            #
-            # Base-10, real number with exponent
-            (r'[0-9]+(\'[0-9]+)*'  # integral part
-             r'\.[0-9]+(\'[0-9]+)*'  # fractional part
-             r'[eE][+-]?[0-9]+(\'[0-9]+)*',  # exponent
-             Number.Float),
-            #
-            # Base-10, real number without exponent
-            (r'[0-9]+(\'[0-9]+)*'  # integral part
-             r'\.[0-9]+(\'[0-9]+)*',  # fractional part
-             Number.Float),
-            #
-            # Base-10, whole number
-            (r'[0-9]+(\'[0-9]+)*', Number.Integer),
-        ],
-        'suffixed_number_literals': [
-            #
-            # Base-8, whole number
-            (r'[0-7]+B', Number.Oct),
-            #
-            # Base-8, character code
-            (r'[0-7]+C', Number.Oct),
-            #
-            # Base-16, number
-            (r'[0-9A-F]+H', Number.Hex),
-        ],
-        'string_literals': [
-            (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
-            (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
-        ],
-        'digraph_operators': [
-            # Dot Product Operator
-            (r'\*\.', Operator),
-            # Array Concatenation Operator
-            (r'\+>', Operator),  # M2R10 + ObjM2
-            # Inequality Operator
-            (r'<>', Operator),  # ISO + PIM
-            # Less-Or-Equal, Subset
-            (r'<=', Operator),
-            # Greater-Or-Equal, Superset
-            (r'>=', Operator),
-            # Identity Operator
-            (r'==', Operator),  # M2R10 + ObjM2
-            # Type Conversion Operator
-            (r'::', Operator),  # M2R10 + ObjM2
-            # Assignment Symbol
-            (r':=', Operator),
-            # Postfix Increment Mutator
-            (r'\+\+', Operator),  # M2R10 + ObjM2
-            # Postfix Decrement Mutator
-            (r'--', Operator),  # M2R10 + ObjM2
-        ],
-        'unigraph_operators': [
-            # Arithmetic Operators
-            (r'[+-]', Operator),
-            (r'[*/]', Operator),
-            # ISO 80000-2 compliant Set Difference Operator
-            (r'\\', Operator),  # M2R10 + ObjM2
-            # Relational Operators
-            (r'[=#<>]', Operator),
-            # Dereferencing Operator
-            (r'\^', Operator),
-            # Dereferencing Operator Synonym
-            (r'@', Operator),  # ISO
-            # Logical AND Operator Synonym
-            (r'&', Operator),  # PIM + ISO
-            # Logical NOT Operator Synonym
-            (r'~', Operator),  # PIM + ISO
-            # Smalltalk Message Prefix
-            (r'`', Operator),  # ObjM2
-        ],
-        'digraph_punctuation': [
-            # Range Constructor
-            (r'\.\.', Punctuation),
-            # Opening Chevron Bracket
-            (r'<<', Punctuation),  # M2R10 + ISO
-            # Closing Chevron Bracket
-            (r'>>', Punctuation),  # M2R10 + ISO
-            # Blueprint Punctuation
-            (r'->', Punctuation),  # M2R10 + ISO
-            # Distinguish |# and # in M2 R10
-            (r'\|#', Punctuation),
-            # Distinguish ## and # in M2 R10
-            (r'##', Punctuation),
-            # Distinguish |* and * in M2 R10
-            (r'\|\*', Punctuation),
-        ],
-        'unigraph_punctuation': [
-            # Common Punctuation
-            (r'[()\[\]{},.:;|]', Punctuation),
-            # Case Label Separator Synonym
-            (r'!', Punctuation),  # ISO
-            # Blueprint Punctuation
-            (r'\?', Punctuation),  # M2R10 + ObjM2
-        ],
-        'comments': [
-            # Single Line Comment
-            (r'^//.*?\n', Comment.Single),  # M2R10 + ObjM2
-            # Block Comment
-            (r'\(\*([^$].*?)\*\)', Comment.Multiline),
-            # Template Block Comment
-            (r'/\*(.*?)\*/', Comment.Multiline),  # M2R10 + ObjM2
-        ],
-        'pragmas': [
-            # ISO Style Pragmas
-            (r'<\*.*?\*>', Comment.Preproc),  # ISO, M2R10 + ObjM2
-            # Pascal Style Pragmas
-            (r'\(\*\$.*?\*\)', Comment.Preproc),  # PIM
-        ],
-        'root': [
-            include('whitespace'),
-            include('dialecttags'),
-            include('pragmas'),
-            include('comments'),
-            include('identifiers'),
-            include('suffixed_number_literals'),  # PIM + ISO
-            include('prefixed_number_literals'),  # M2R10 + ObjM2
-            include('plain_number_literals'),
-            include('string_literals'),
-            include('digraph_punctuation'),
-            include('digraph_operators'),
-            include('unigraph_punctuation'),
-            include('unigraph_operators'),
-        ]
-    }
-
-#  C o m m o n   D a t a s e t s
-
-    # Common Reserved Words Dataset
-    common_reserved_words = (
-        # 37 common reserved words
-        'AND', 'ARRAY', 'BEGIN', 'BY', 'CASE', 'CONST', 'DEFINITION', 'DIV',
-        'DO', 'ELSE', 'ELSIF', 'END', 'EXIT', 'FOR', 'FROM', 'IF',
-        'IMPLEMENTATION', 'IMPORT', 'IN', 'LOOP', 'MOD', 'MODULE', 'NOT',
-        'OF', 'OR', 'POINTER', 'PROCEDURE', 'RECORD', 'REPEAT', 'RETURN',
-        'SET', 'THEN', 'TO', 'TYPE', 'UNTIL', 'VAR', 'WHILE',
-    )
-
-    # Common Builtins Dataset
-    common_builtins = (
-        # 16 common builtins
-        'ABS', 'BOOLEAN', 'CARDINAL', 'CHAR', 'CHR', 'FALSE', 'INTEGER',
-        'LONGINT', 'LONGREAL', 'MAX', 'MIN', 'NIL', 'ODD', 'ORD', 'REAL',
-        'TRUE',
-    )
-
-    # Common Pseudo-Module Builtins Dataset
-    common_pseudo_builtins = (
-        # 4 common pseudo builtins
-        'ADDRESS', 'BYTE', 'WORD', 'ADR'
-    )
-
-#  P I M   M o d u l a - 2   D a t a s e t s
-
-    # Lexemes to Mark as Error Tokens for PIM Modula-2
-    pim_lexemes_to_reject = (
-        '!', '`', '@', '$', '%', '?', '\\', '==', '++', '--', '::', '*.',
-        '+>', '->', '<<', '>>', '|#', '##',
-    )
-
-    # PIM Modula-2 Additional Reserved Words Dataset
-    pim_additional_reserved_words = (
-        # 3 additional reserved words
-        'EXPORT', 'QUALIFIED', 'WITH',
-    )
-
-    # PIM Modula-2 Additional Builtins Dataset
-    pim_additional_builtins = (
-        # 16 additional builtins
-        'BITSET', 'CAP', 'DEC', 'DISPOSE', 'EXCL', 'FLOAT', 'HALT', 'HIGH',
-        'INC', 'INCL', 'NEW', 'NIL', 'PROC', 'SIZE', 'TRUNC', 'VAL',
-    )
-
-    # PIM Modula-2 Additional Pseudo-Module Builtins Dataset
-    pim_additional_pseudo_builtins = (
-        # 5 additional pseudo builtins
-        'SYSTEM', 'PROCESS', 'TSIZE', 'NEWPROCESS', 'TRANSFER',
-    )
-
#  I S O   M o d u l a - 2   D a t a s e t s

# Lexemes to Mark as Error Tokens for ISO Modula-2
# Operators/punctuation legal in other dialects but not in ISO.
iso_lexemes_to_reject = (
    '`', '$', '%', '?', '\\', '==', '++', '--', '::', '*.', '+>', '->',
    '<<', '>>', '|#', '##',
)

# ISO Modula-2 Additional Reserved Words Dataset
iso_additional_reserved_words = (
    # 9 additional reserved words (ISO 10514-1)
    'EXCEPT', 'EXPORT', 'FINALLY', 'FORWARD', 'PACKEDSET', 'QUALIFIED',
    'REM', 'RETRY', 'WITH',
    # 10 additional reserved words (ISO 10514-2 & ISO 10514-3)
    'ABSTRACT', 'AS', 'CLASS', 'GUARD', 'INHERIT', 'OVERRIDE', 'READONLY',
    'REVEAL', 'TRACED', 'UNSAFEGUARDED',
)

# ISO Modula-2 Additional Builtins Dataset
iso_additional_builtins = (
    # 26 additional builtins (ISO 10514-1)
    'BITSET', 'CAP', 'CMPLX', 'COMPLEX', 'DEC', 'DISPOSE', 'EXCL', 'FLOAT',
    'HALT', 'HIGH', 'IM', 'INC', 'INCL', 'INT', 'INTERRUPTIBLE', 'LENGTH',
    'LFLOAT', 'LONGCOMPLEX', 'NEW', 'PROC', 'PROTECTION', 'RE', 'SIZE',
    # FIX: was misspelt 'UNINTERRUBTIBLE'; ISO 10514-1 names the
    # protection constant UNINTERRUPTIBLE (counterpart of INTERRUPTIBLE
    # above), so the misspelt form could never match real source code.
    'TRUNC', 'UNINTERRUPTIBLE', 'VAL',
    # 5 additional builtins (ISO 10514-2 & ISO 10514-3)
    'CREATE', 'DESTROY', 'EMPTY', 'ISMEMBER', 'SELF',
)

# ISO Modula-2 Additional Pseudo-Module Builtins Dataset
iso_additional_pseudo_builtins = (
    # 14 additional builtins (SYSTEM)
    'SYSTEM', 'BITSPERLOC', 'LOCSPERBYTE', 'LOCSPERWORD', 'LOC',
    'ADDADR', 'SUBADR', 'DIFADR', 'MAKEADR', 'ADR',
    'ROTATE', 'SHIFT', 'CAST', 'TSIZE',
    # 13 additional builtins (COROUTINES)
    'COROUTINES', 'ATTACH', 'COROUTINE', 'CURRENT', 'DETACH', 'HANDLER',
    'INTERRUPTSOURCE', 'IOTRANSFER', 'IsATTACHED', 'LISTEN',
    'NEWCOROUTINE', 'PROT', 'TRANSFER',
    # 9 additional builtins (EXCEPTIONS)
    'EXCEPTIONS', 'AllocateSource', 'CurrentNumber', 'ExceptionNumber',
    'ExceptionSource', 'GetMessage', 'IsCurrentSource',
    'IsExceptionalExecution', 'RAISE',
    # 3 additional builtins (TERMINATION)
    'TERMINATION', 'IsTerminating', 'HasHalted',
    # 4 additional builtins (M2EXCEPTION)
    'M2EXCEPTION', 'M2Exceptions', 'M2Exception', 'IsM2Exception',
    'indexException', 'rangeException', 'caseSelectException',
    'invalidLocation', 'functionException', 'wholeValueException',
    'wholeDivException', 'realValueException', 'realDivException',
    'complexValueException', 'complexDivException', 'protException',
    'sysException', 'coException', 'exException',
)
-
#  M o d u l a - 2   R 1 0   D a t a s e t s

# Lexemes to Mark as Error Tokens for Modula-2 R10
m2r10_lexemes_to_reject = (
    '!', '`', '@', '$', '%', '&', '<>',
)

# Modula-2 R10 reserved words in addition to the common set
m2r10_additional_reserved_words = (
    # 12 additional reserved words
    'ALIAS', 'ARGLIST', 'BLUEPRINT', 'COPY', 'GENLIB', 'INDETERMINATE',
    'NEW', 'NONE', 'OPAQUE', 'REFERENTIAL', 'RELEASE', 'RETAIN',
    # 2 additional reserved words with symbolic assembly option
    'ASM', 'REG',
)

# Modula-2 R10 builtins in addition to the common set
m2r10_additional_builtins = (
    # 26 additional builtins
    'CARDINAL', 'COUNT', 'EMPTY', 'EXISTS', 'INSERT', 'LENGTH', 'LONGCARD',
    'OCTET', 'PTR', 'PRED', 'READ', 'READNEW', 'REMOVE', 'RETRIEVE', 'SORT',
    'STORE', 'SUBSET', 'SUCC', 'TLIMIT', 'TMAX', 'TMIN', 'TRUE', 'TSIZE',
    'UNICHAR', 'WRITE', 'WRITEF',
)

# Modula-2 R10 Additional Pseudo-Module Builtins Dataset
# NOTE: some identifiers (e.g. 'INTRINSIC', 'AVAIL', 'BWAND') appear in
# more than one sub-list below; harmless, since set_dialect() merges the
# datasets into a set.
m2r10_additional_pseudo_builtins = (
    # 13 additional builtins (TPROPERTIES)
    'TPROPERTIES', 'PROPERTY', 'LITERAL', 'TPROPERTY', 'TLITERAL',
    'TBUILTIN', 'TDYN', 'TREFC', 'TNIL', 'TBASE', 'TPRECISION',
    'TMAXEXP', 'TMINEXP',
    # 4 additional builtins (CONVERSION)
    'CONVERSION', 'TSXFSIZE', 'SXF', 'VAL',
    # 35 additional builtins (UNSAFE)
    'UNSAFE', 'CAST', 'INTRINSIC', 'AVAIL', 'ADD', 'SUB', 'ADDC', 'SUBC',
    'FETCHADD', 'FETCHSUB', 'SHL', 'SHR', 'ASHR', 'ROTL', 'ROTR', 'ROTLC',
    'ROTRC', 'BWNOT', 'BWAND', 'BWOR', 'BWXOR', 'BWNAND', 'BWNOR',
    'SETBIT', 'TESTBIT', 'LSBIT', 'MSBIT', 'CSBITS', 'BAIL', 'HALT',
    'TODO', 'FFI', 'ADDR', 'VARGLIST', 'VARGC',
    # 11 additional builtins (ATOMIC)
    'ATOMIC', 'INTRINSIC', 'AVAIL', 'SWAP', 'CAS', 'INC', 'DEC', 'BWAND',
    'BWNAND', 'BWOR', 'BWXOR',
    # 7 additional builtins (COMPILER)
    'COMPILER', 'DEBUG', 'MODNAME', 'PROCNAME', 'LINENUM', 'DEFAULT',
    'HASH',
    # 5 additional builtins (ASSEMBLER)
    'ASSEMBLER', 'REGISTER', 'SETREG', 'GETREG', 'CODE',
)

#  O b j e c t i v e   M o d u l a - 2   D a t a s e t s

# Lexemes to Mark as Error Tokens for Objective Modula-2
objm2_lexemes_to_reject = (
    '!', '$', '%', '&', '<>',
)

# Objective Modula-2 Extensions
# reserved words in addition to Modula-2 R10
objm2_additional_reserved_words = (
    # 16 additional reserved words
    'BYCOPY', 'BYREF', 'CLASS', 'CONTINUE', 'CRITICAL', 'INOUT', 'METHOD',
    'ON', 'OPTIONAL', 'OUT', 'PRIVATE', 'PROTECTED', 'PROTOCOL', 'PUBLIC',
    'SUPER', 'TRY',
)

# Objective Modula-2 Extensions
# builtins in addition to Modula-2 R10
objm2_additional_builtins = (
    # 3 additional builtins
    'OBJECT', 'NO', 'YES',
)

# Objective Modula-2 Extensions
# pseudo-module builtins in addition to Modula-2 R10
# NOTE: parentheses containing only a comment evaluate to the empty
# tuple (), which is the placeholder form used throughout this file.
objm2_additional_pseudo_builtins = (
    # None
)

#  A g l e t   M o d u l a - 2   D a t a s e t s

# Aglet Extensions
# reserved words in addition to ISO Modula-2
aglet_additional_reserved_words = (
    # None
)

# Aglet Extensions
# builtins in addition to ISO Modula-2
aglet_additional_builtins = (
    # 9 additional builtins
    'BITSET8', 'BITSET16', 'BITSET32', 'CARDINAL8', 'CARDINAL16',
    'CARDINAL32', 'INTEGER8', 'INTEGER16', 'INTEGER32',
)

# Aglet Modula-2 Extensions
# pseudo-module builtins in addition to ISO Modula-2
aglet_additional_pseudo_builtins = (
    # None
)

#  G N U   M o d u l a - 2   D a t a s e t s

# GNU Extensions
# reserved words in addition to PIM Modula-2
gm2_additional_reserved_words = (
    # 10 additional reserved words
    'ASM', '__ATTRIBUTE__', '__BUILTIN__', '__COLUMN__', '__DATE__',
    '__FILE__', '__FUNCTION__', '__LINE__', '__MODULE__', 'VOLATILE',
)

# GNU Extensions
# builtins in addition to PIM Modula-2
gm2_additional_builtins = (
    # 21 additional builtins
    'BITSET8', 'BITSET16', 'BITSET32', 'CARDINAL8', 'CARDINAL16',
    'CARDINAL32', 'CARDINAL64', 'COMPLEX32', 'COMPLEX64', 'COMPLEX96',
    'COMPLEX128', 'INTEGER8', 'INTEGER16', 'INTEGER32', 'INTEGER64',
    'REAL8', 'REAL16', 'REAL32', 'REAL96', 'REAL128', 'THROW',
)

# GNU Extensions
# pseudo-module builtins in addition to PIM Modula-2
gm2_additional_pseudo_builtins = (
    # None
)

#  p 1   M o d u l a - 2   D a t a s e t s

# p1 Extensions
# reserved words in addition to ISO Modula-2
p1_additional_reserved_words = (
    # None
)

# p1 Extensions
# builtins in addition to ISO Modula-2
p1_additional_builtins = (
    # None
)

# p1 Modula-2 Extensions
# pseudo-module builtins in addition to ISO Modula-2
p1_additional_pseudo_builtins = (
    # 1 additional builtin
    'BCD',
)
-
#  X D S   M o d u l a - 2   D a t a s e t s

# XDS Extensions
# reserved words in addition to ISO Modula-2
xds_additional_reserved_words = (
    # 1 additional reserved word
    'SEQ',
)

# XDS Extensions
# builtins in addition to ISO Modula-2
xds_additional_builtins = (
    # 9 additional builtins
    'ASH', 'ASSERT', 'DIFFADR_TYPE', 'ENTIER', 'INDEX', 'LEN',
    'LONGCARD', 'SHORTCARD', 'SHORTINT',
)

# XDS Modula-2 Extensions
# pseudo-module builtins in addition to ISO Modula-2
xds_additional_pseudo_builtins = (
    # 21 additional builtins (SYSTEM)
    'PROCESS', 'NEWPROCESS', 'BOOL8', 'BOOL16', 'BOOL32', 'CARD8',
    'CARD16', 'CARD32', 'INT8', 'INT16', 'INT32', 'REF', 'MOVE',
    # FIX: commas added after 'void' and 'EQUATION'.  Without them,
    # implicit string-literal concatenation silently merged 'void' and
    # 'COMPILER' into the bogus entry 'voidCOMPILER', so neither 'void'
    # alone nor 'COMPILER' could ever be highlighted.
    'FILL', 'GET', 'PUT', 'CC', 'int', 'unsigned', 'size_t', 'void',
    # 3 additional builtins (COMPILER)
    'COMPILER', 'OPTION', 'EQUATION',
)
-
#  P I M   S t a n d a r d   L i b r a r y   D a t a s e t s

# PIM Modula-2 Standard Library Modules Dataset
pim_stdlib_module_identifiers = (
    'Terminal', 'FileSystem', 'InOut', 'RealInOut', 'MathLib0', 'Storage',
)

# PIM Modula-2 Standard Library Types Dataset
pim_stdlib_type_identifiers = (
    'Flag', 'FlagSet', 'Response', 'Command', 'Lock', 'Permission',
    'MediumType', 'File', 'FileProc', 'DirectoryProc', 'FileCommand',
    'DirectoryCommand',
)

# PIM Modula-2 Standard Library Procedures Dataset
pim_stdlib_proc_identifiers = (
    'Read', 'BusyRead', 'ReadAgain', 'Write', 'WriteString', 'WriteLn',
    'Create', 'Lookup', 'Close', 'Delete', 'Rename', 'SetRead', 'SetWrite',
    'SetModify', 'SetOpen', 'Doio', 'SetPos', 'GetPos', 'Length', 'Reset',
    'Again', 'ReadWord', 'WriteWord', 'ReadChar', 'WriteChar',
    'CreateMedium', 'DeleteMedium', 'AssignName', 'DeassignName',
    'ReadMedium', 'LookupMedium', 'OpenInput', 'OpenOutput', 'CloseInput',
    'CloseOutput', 'ReadString', 'ReadInt', 'ReadCard', 'ReadWrd',
    'WriteInt', 'WriteCard', 'WriteOct', 'WriteHex', 'WriteWrd',
    'ReadReal', 'WriteReal', 'WriteFixPt', 'WriteRealOct', 'sqrt', 'exp',
    'ln', 'sin', 'cos', 'arctan', 'entier', 'ALLOCATE', 'DEALLOCATE',
)

# PIM Modula-2 Standard Library Variables Dataset
pim_stdlib_var_identifiers = (
    'Done', 'termCH', 'in', 'out'
)

# PIM Modula-2 Standard Library Constants Dataset
pim_stdlib_const_identifiers = (
    'EOL',
)

#  I S O   S t a n d a r d   L i b r a r y   D a t a s e t s

# ISO Modula-2 Standard Library Modules Dataset
# NOTE: the ISO stdlib datasets below are placeholders; parentheses
# holding only a comment evaluate to the empty tuple ().
iso_stdlib_module_identifiers = (
    # TO DO
)

# ISO Modula-2 Standard Library Types Dataset
iso_stdlib_type_identifiers = (
    # TO DO
)

# ISO Modula-2 Standard Library Procedures Dataset
iso_stdlib_proc_identifiers = (
    # TO DO
)

# ISO Modula-2 Standard Library Variables Dataset
iso_stdlib_var_identifiers = (
    # TO DO
)

# ISO Modula-2 Standard Library Constants Dataset
iso_stdlib_const_identifiers = (
    # TO DO
)

#  M 2   R 1 0   S t a n d a r d   L i b r a r y   D a t a s e t s

# Modula-2 R10 Standard Library ADTs Dataset
m2r10_stdlib_adt_identifiers = (
    'BCD', 'LONGBCD', 'BITSET', 'SHORTBITSET', 'LONGBITSET',
    'LONGLONGBITSET', 'COMPLEX', 'LONGCOMPLEX', 'SHORTCARD', 'LONGLONGCARD',
    'SHORTINT', 'LONGLONGINT', 'POSINT', 'SHORTPOSINT', 'LONGPOSINT',
    'LONGLONGPOSINT', 'BITSET8', 'BITSET16', 'BITSET32', 'BITSET64',
    'BITSET128', 'BS8', 'BS16', 'BS32', 'BS64', 'BS128', 'CARDINAL8',
    'CARDINAL16', 'CARDINAL32', 'CARDINAL64', 'CARDINAL128', 'CARD8',
    'CARD16', 'CARD32', 'CARD64', 'CARD128', 'INTEGER8', 'INTEGER16',
    'INTEGER32', 'INTEGER64', 'INTEGER128', 'INT8', 'INT16', 'INT32',
    'INT64', 'INT128', 'STRING', 'UNISTRING',
)

# Modula-2 R10 Standard Library Blueprints Dataset
m2r10_stdlib_blueprint_identifiers = (
    'ProtoRoot', 'ProtoComputational', 'ProtoNumeric', 'ProtoScalar',
    'ProtoNonScalar', 'ProtoCardinal', 'ProtoInteger', 'ProtoReal',
    'ProtoComplex', 'ProtoVector', 'ProtoTuple', 'ProtoCompArray',
    'ProtoCollection', 'ProtoStaticArray', 'ProtoStaticSet',
    'ProtoStaticString', 'ProtoArray', 'ProtoString', 'ProtoSet',
    'ProtoMultiSet', 'ProtoDictionary', 'ProtoMultiDict', 'ProtoExtension',
    'ProtoIO', 'ProtoCardMath', 'ProtoIntMath', 'ProtoRealMath',
)

# Modula-2 R10 Standard Library Modules Dataset
m2r10_stdlib_module_identifiers = (
    'ASCII', 'BooleanIO', 'CharIO', 'UnicharIO', 'OctetIO',
    'CardinalIO', 'LongCardIO', 'IntegerIO', 'LongIntIO', 'RealIO',
    'LongRealIO', 'BCDIO', 'LongBCDIO', 'CardMath', 'LongCardMath',
    'IntMath', 'LongIntMath', 'RealMath', 'LongRealMath', 'BCDMath',
    'LongBCDMath', 'FileIO', 'FileSystem', 'Storage', 'IOSupport',
)

# Modula-2 R10 Standard Library Types Dataset
m2r10_stdlib_type_identifiers = (
    'File', 'Status',
    # TO BE COMPLETED
)

# Modula-2 R10 Standard Library Procedures Dataset
m2r10_stdlib_proc_identifiers = (
    'ALLOCATE', 'DEALLOCATE', 'SIZE',
    # TO BE COMPLETED
)

# Modula-2 R10 Standard Library Variables Dataset
m2r10_stdlib_var_identifiers = (
    'stdIn', 'stdOut', 'stdErr',
)

# Modula-2 R10 Standard Library Constants Dataset
m2r10_stdlib_const_identifiers = (
    'pi', 'tau',
)

#  D i a l e c t s

# Dialect modes
# NOTE(review): order matters — __init__ accepts a dialect option only if
# it appears in dialects[1:-1], i.e. it excludes 'unknown' (first) AND the
# last entry ('m2iso+xds'); confirm whether excluding the last is intended.
dialects = (
    'unknown',
    'm2pim', 'm2iso', 'm2r10', 'objm2',
    'm2iso+aglet', 'm2pim+gm2', 'm2iso+p1', 'm2iso+xds',
)
-
#   D a t a b a s e s

# Dialect-indexed databases.  Each table maps a dialect identifier from
# `dialects` to a tuple of dataset tuples defined above; set_dialect()
# flattens the tuples into a single lookup set for the active dialect.
# An empty tuple means the dialect contributes nothing in that category.

# Lexemes to flag as Error tokens, per dialect.
lexemes_to_reject_db = {
    'unknown': (),
    'm2pim': (pim_lexemes_to_reject,),
    'm2iso': (iso_lexemes_to_reject,),
    'm2r10': (m2r10_lexemes_to_reject,),
    'objm2': (objm2_lexemes_to_reject,),
    'm2iso+aglet': (iso_lexemes_to_reject,),
    'm2pim+gm2': (pim_lexemes_to_reject,),
    'm2iso+p1': (iso_lexemes_to_reject,),
    'm2iso+xds': (iso_lexemes_to_reject,),
}

# Reserved words, per dialect.  'unknown' merges every dialect's words.
reserved_words_db = {
    'unknown': (common_reserved_words, pim_additional_reserved_words,
                iso_additional_reserved_words,
                m2r10_additional_reserved_words),
    'm2pim': (common_reserved_words, pim_additional_reserved_words),
    'm2iso': (common_reserved_words, iso_additional_reserved_words),
    'm2r10': (common_reserved_words, m2r10_additional_reserved_words),
    'objm2': (common_reserved_words, m2r10_additional_reserved_words,
              objm2_additional_reserved_words),
    'm2iso+aglet': (common_reserved_words, iso_additional_reserved_words,
                    aglet_additional_reserved_words),
    'm2pim+gm2': (common_reserved_words, pim_additional_reserved_words,
                  gm2_additional_reserved_words),
    'm2iso+p1': (common_reserved_words, iso_additional_reserved_words,
                 p1_additional_reserved_words),
    'm2iso+xds': (common_reserved_words, iso_additional_reserved_words,
                  xds_additional_reserved_words),
}

# Builtin identifiers, per dialect.
builtins_db = {
    'unknown': (common_builtins, pim_additional_builtins,
                iso_additional_builtins, m2r10_additional_builtins),
    'm2pim': (common_builtins, pim_additional_builtins),
    'm2iso': (common_builtins, iso_additional_builtins),
    'm2r10': (common_builtins, m2r10_additional_builtins),
    'objm2': (common_builtins, m2r10_additional_builtins,
              objm2_additional_builtins),
    'm2iso+aglet': (common_builtins, iso_additional_builtins,
                    aglet_additional_builtins),
    'm2pim+gm2': (common_builtins, pim_additional_builtins,
                  gm2_additional_builtins),
    'm2iso+p1': (common_builtins, iso_additional_builtins,
                 p1_additional_builtins),
    'm2iso+xds': (common_builtins, iso_additional_builtins,
                  xds_additional_builtins),
}

# Pseudo-module builtin identifiers, per dialect.
pseudo_builtins_db = {
    'unknown': (common_pseudo_builtins, pim_additional_pseudo_builtins,
                iso_additional_pseudo_builtins,
                m2r10_additional_pseudo_builtins),
    'm2pim': (common_pseudo_builtins, pim_additional_pseudo_builtins),
    'm2iso': (common_pseudo_builtins, iso_additional_pseudo_builtins),
    'm2r10': (common_pseudo_builtins, m2r10_additional_pseudo_builtins),
    'objm2': (common_pseudo_builtins, m2r10_additional_pseudo_builtins,
              objm2_additional_pseudo_builtins),
    'm2iso+aglet': (common_pseudo_builtins, iso_additional_pseudo_builtins,
                    aglet_additional_pseudo_builtins),
    'm2pim+gm2': (common_pseudo_builtins, pim_additional_pseudo_builtins,
                  gm2_additional_pseudo_builtins),
    'm2iso+p1': (common_pseudo_builtins, iso_additional_pseudo_builtins,
                 p1_additional_pseudo_builtins),
    'm2iso+xds': (common_pseudo_builtins, iso_additional_pseudo_builtins,
                  xds_additional_pseudo_builtins),
}

# Standard library first-class ADTs, per dialect.  Only Modula-2 R10 and
# Objective Modula-2 have first-class library types.
stdlib_adts_db = {
    'unknown': (),
    'm2pim': (),
    'm2iso': (),
    'm2r10': (m2r10_stdlib_adt_identifiers,),
    'objm2': (m2r10_stdlib_adt_identifiers,),
    'm2iso+aglet': (),
    'm2pim+gm2': (),
    'm2iso+p1': (),
    'm2iso+xds': (),
}

# Standard library module identifiers, per dialect.
stdlib_modules_db = {
    'unknown': (),
    'm2pim': (pim_stdlib_module_identifiers,),
    'm2iso': (iso_stdlib_module_identifiers,),
    'm2r10': (m2r10_stdlib_blueprint_identifiers,
              m2r10_stdlib_module_identifiers,
              m2r10_stdlib_adt_identifiers),
    'objm2': (m2r10_stdlib_blueprint_identifiers,
              m2r10_stdlib_module_identifiers),
    'm2iso+aglet': (iso_stdlib_module_identifiers,),
    'm2pim+gm2': (pim_stdlib_module_identifiers,),
    'm2iso+p1': (iso_stdlib_module_identifiers,),
    'm2iso+xds': (iso_stdlib_module_identifiers,),
}

# Standard library type identifiers, per dialect.
stdlib_types_db = {
    'unknown': (),
    'm2pim': (pim_stdlib_type_identifiers,),
    'm2iso': (iso_stdlib_type_identifiers,),
    'm2r10': (m2r10_stdlib_type_identifiers,),
    'objm2': (m2r10_stdlib_type_identifiers,),
    'm2iso+aglet': (iso_stdlib_type_identifiers,),
    'm2pim+gm2': (pim_stdlib_type_identifiers,),
    'm2iso+p1': (iso_stdlib_type_identifiers,),
    'm2iso+xds': (iso_stdlib_type_identifiers,),
}

# Standard library procedure identifiers, per dialect.
stdlib_procedures_db = {
    'unknown': (),
    'm2pim': (pim_stdlib_proc_identifiers,),
    'm2iso': (iso_stdlib_proc_identifiers,),
    'm2r10': (m2r10_stdlib_proc_identifiers,),
    'objm2': (m2r10_stdlib_proc_identifiers,),
    'm2iso+aglet': (iso_stdlib_proc_identifiers,),
    'm2pim+gm2': (pim_stdlib_proc_identifiers,),
    'm2iso+p1': (iso_stdlib_proc_identifiers,),
    'm2iso+xds': (iso_stdlib_proc_identifiers,),
}

# Standard library variable identifiers, per dialect.
stdlib_variables_db = {
    'unknown': (),
    'm2pim': (pim_stdlib_var_identifiers,),
    'm2iso': (iso_stdlib_var_identifiers,),
    'm2r10': (m2r10_stdlib_var_identifiers,),
    'objm2': (m2r10_stdlib_var_identifiers,),
    'm2iso+aglet': (iso_stdlib_var_identifiers,),
    'm2pim+gm2': (pim_stdlib_var_identifiers,),
    'm2iso+p1': (iso_stdlib_var_identifiers,),
    'm2iso+xds': (iso_stdlib_var_identifiers,),
}

# Standard library constant identifiers, per dialect.
stdlib_constants_db = {
    'unknown': (),
    'm2pim': (pim_stdlib_const_identifiers,),
    'm2iso': (iso_stdlib_const_identifiers,),
    'm2r10': (m2r10_stdlib_const_identifiers,),
    'objm2': (m2r10_stdlib_const_identifiers,),
    'm2iso+aglet': (iso_stdlib_const_identifiers,),
    'm2pim+gm2': (pim_stdlib_const_identifiers,),
    'm2iso+p1': (iso_stdlib_const_identifiers,),
    'm2iso+xds': (iso_stdlib_const_identifiers,),
}
-
-#   M e t h o d s
-
-    # initialise a lexer instance
-    def __init__(self, **options):
-        #
-        # check dialect options
-        #
-        dialects = get_list_opt(options, 'dialect', [])
-        #
-        for dialect_option in dialects:
-            if dialect_option in self.dialects[1:-1]:
-                # valid dialect option found
-                self.set_dialect(dialect_option)
-                break
-        #
-        # Fallback Mode (DEFAULT)
-        else:
-            # no valid dialect option
-            self.set_dialect('unknown')
-        #
-        self.dialect_set_by_tag = False
-        #
-        # check style options
-        #
-        styles = get_list_opt(options, 'style', [])
-        #
-        # use lowercase mode for Algol style
-        if 'algol' in styles or 'algol_nu' in styles:
-            self.algol_publication_mode = True
-        else:
-            self.algol_publication_mode = False
-        #
-        # Check option flags
-        #
-        self.treat_stdlib_adts_as_builtins = get_bool_opt(
-            options, 'treat_stdlib_adts_as_builtins', True)
-        #
-        # call superclass initialiser
-        RegexLexer.__init__(self, **options)
-
-    # Set lexer to a specified dialect
-    def set_dialect(self, dialect_id):
-        #
-        # if __debug__:
-        #    print 'entered set_dialect with arg: ', dialect_id
-        #
-        # check dialect name against known dialects
-        if dialect_id not in self.dialects:
-            dialect = 'unknown'  # default
-        else:
-            dialect = dialect_id
-        #
-        # compose lexemes to reject set
-        lexemes_to_reject_set = set()
-        # add each list of reject lexemes for this dialect
-        for list in self.lexemes_to_reject_db[dialect]:
-            lexemes_to_reject_set.update(set(list))
-        #
-        # compose reserved words set
-        reswords_set = set()
-        # add each list of reserved words for this dialect
-        for list in self.reserved_words_db[dialect]:
-            reswords_set.update(set(list))
-        #
-        # compose builtins set
-        builtins_set = set()
-        # add each list of builtins for this dialect excluding reserved words
-        for list in self.builtins_db[dialect]:
-            builtins_set.update(set(list).difference(reswords_set))
-        #
-        # compose pseudo-builtins set
-        pseudo_builtins_set = set()
-        # add each list of builtins for this dialect excluding reserved words
-        for list in self.pseudo_builtins_db[dialect]:
-            pseudo_builtins_set.update(set(list).difference(reswords_set))
-        #
-        # compose ADTs set
-        adts_set = set()
-        # add each list of ADTs for this dialect excluding reserved words
-        for list in self.stdlib_adts_db[dialect]:
-            adts_set.update(set(list).difference(reswords_set))
-        #
-        # compose modules set
-        modules_set = set()
-        # add each list of builtins for this dialect excluding builtins
-        for list in self.stdlib_modules_db[dialect]:
-            modules_set.update(set(list).difference(builtins_set))
-        #
-        # compose types set
-        types_set = set()
-        # add each list of types for this dialect excluding builtins
-        for list in self.stdlib_types_db[dialect]:
-            types_set.update(set(list).difference(builtins_set))
-        #
-        # compose procedures set
-        procedures_set = set()
-        # add each list of procedures for this dialect excluding builtins
-        for list in self.stdlib_procedures_db[dialect]:
-            procedures_set.update(set(list).difference(builtins_set))
-        #
-        # compose variables set
-        variables_set = set()
-        # add each list of variables for this dialect excluding builtins
-        for list in self.stdlib_variables_db[dialect]:
-            variables_set.update(set(list).difference(builtins_set))
-        #
-        # compose constants set
-        constants_set = set()
-        # add each list of constants for this dialect excluding builtins
-        for list in self.stdlib_constants_db[dialect]:
-            constants_set.update(set(list).difference(builtins_set))
-        #
-        # update lexer state
-        self.dialect = dialect
-        self.lexemes_to_reject = lexemes_to_reject_set
-        self.reserved_words = reswords_set
-        self.builtins = builtins_set
-        self.pseudo_builtins = pseudo_builtins_set
-        self.adts = adts_set
-        self.modules = modules_set
-        self.types = types_set
-        self.procedures = procedures_set
-        self.variables = variables_set
-        self.constants = constants_set
-        #
-        # if __debug__:
-        #    print 'exiting set_dialect'
-        #    print ' self.dialect: ', self.dialect
-        #    print ' self.lexemes_to_reject: ', self.lexemes_to_reject
-        #    print ' self.reserved_words: ', self.reserved_words
-        #    print ' self.builtins: ', self.builtins
-        #    print ' self.pseudo_builtins: ', self.pseudo_builtins
-        #    print ' self.adts: ', self.adts
-        #    print ' self.modules: ', self.modules
-        #    print ' self.types: ', self.types
-        #    print ' self.procedures: ', self.procedures
-        #    print ' self.variables: ', self.variables
-        #    print ' self.types: ', self.types
-        #    print ' self.constants: ', self.constants
-
-    # Extracts a dialect name from a dialect tag comment string  and checks
-    # the extracted name against known dialects.  If a match is found,  the
-    # matching name is returned, otherwise dialect id 'unknown' is returned
-    def get_dialect_from_dialect_tag(self, dialect_tag):
-        #
-        # if __debug__:
-        #    print 'entered get_dialect_from_dialect_tag with arg: ', dialect_tag
-        #
-        # constants
-        left_tag_delim = '(*!'
-        right_tag_delim = '*)'
-        left_tag_delim_len = len(left_tag_delim)
-        right_tag_delim_len = len(right_tag_delim)
-        indicator_start = left_tag_delim_len
-        indicator_end = -(right_tag_delim_len)
-        #
-        # check comment string for dialect indicator
-        if len(dialect_tag) > (left_tag_delim_len + right_tag_delim_len) \
-           and dialect_tag.startswith(left_tag_delim) \
-           and dialect_tag.endswith(right_tag_delim):
-            #
-            # if __debug__:
-            #    print 'dialect tag found'
-            #
-            # extract dialect indicator
-            indicator = dialect_tag[indicator_start:indicator_end]
-            #
-            # if __debug__:
-            #    print 'extracted: ', indicator
-            #
-            # check against known dialects
-            for index in range(1, len(self.dialects)):
-                #
-                # if __debug__:
-                #    print 'dialects[', index, ']: ', self.dialects[index]
-                #
-                if indicator == self.dialects[index]:
-                    #
-                    # if __debug__:
-                    #    print 'matching dialect found'
-                    #
-                    # indicator matches known dialect
-                    return indicator
-            else:
-                # indicator does not match any dialect
-                return 'unknown'  # default
-        else:
-            # invalid indicator string
-            return 'unknown'  # default
-
-    # intercept the token stream, modify token attributes and return them
    def get_tokens_unprocessed(self, text):
        """Intercept the base RegexLexer's token stream and rewrite tokens.

        - Name tokens are reclassified against the active dialect's
          reserved-word, builtin and stdlib identifier sets.
        - Number and Comment tokens that are not legal in the active
          dialect are turned into Error tokens.
        - In Algol publication mode some operator lexemes are replaced by
          their mathematical Unicode counterparts and keywords/builtins
          are lowercased.
        """
        for index, token, value in RegexLexer.get_tokens_unprocessed(self, text):
            #
            # check for dialect tag if dialect has not been set by tag;
            # the first recognised tag wins and pins the dialect
            if not self.dialect_set_by_tag and token == Comment.Special:
                indicated_dialect = self.get_dialect_from_dialect_tag(value)
                if indicated_dialect != 'unknown':
                    # token is a dialect indicator:
                    # reset reserved words and builtins accordingly
                    self.set_dialect(indicated_dialect)
                    self.dialect_set_by_tag = True
            #
            # check for reserved words, predefined and stdlib identifiers
            # (order matters: reserved words take precedence over builtins,
            # builtins over pseudo-builtins, and so on)
            if token is Name:
                if value in self.reserved_words:
                    token = Keyword.Reserved
                    if self.algol_publication_mode:
                        value = value.lower()
                #
                elif value in self.builtins:
                    token = Name.Builtin
                    if self.algol_publication_mode:
                        value = value.lower()
                #
                elif value in self.pseudo_builtins:
                    token = Name.Builtin.Pseudo
                    if self.algol_publication_mode:
                        value = value.lower()
                #
                elif value in self.adts:
                    # stdlib ADTs are rendered either as namespaces or as
                    # pseudo-builtins, depending on the option flag
                    if not self.treat_stdlib_adts_as_builtins:
                        token = Name.Namespace
                    else:
                        token = Name.Builtin.Pseudo
                        if self.algol_publication_mode:
                            value = value.lower()
                #
                elif value in self.modules:
                    token = Name.Namespace
                #
                elif value in self.types:
                    token = Name.Class
                #
                elif value in self.procedures:
                    token = Name.Function
                #
                elif value in self.variables:
                    token = Name.Variable
                #
                elif value in self.constants:
                    token = Name.Constant
            #
            elif token in Number:
                #
                # mark prefix number literals as error for PIM and ISO dialects
                if self.dialect not in ('unknown', 'm2r10', 'objm2'):
                    if "'" in value or value[0:2] in ('0b', '0x', '0u'):
                        token = Error
                #
                elif self.dialect in ('m2r10', 'objm2'):
                    # mark base-8 number literals as errors for M2 R10 and ObjM2
                    if token is Number.Oct:
                        token = Error
                    # mark suffix base-16 literals as errors for M2 R10 and ObjM2
                    elif token is Number.Hex and 'H' in value:
                        token = Error
                    # mark real numbers with E as errors for M2 R10 and ObjM2
                    elif token is Number.Float and 'E' in value:
                        token = Error
            #
            elif token in Comment:
                #
                # mark single line comment as error for PIM and ISO dialects
                if token is Comment.Single:
                    if self.dialect not in ('unknown', 'm2r10', 'objm2'):
                        token = Error
                #
                if token is Comment.Preproc:
                    # mark ISO pragma as error for PIM dialects
                    if value.startswith('<*') and \
                       self.dialect.startswith('m2pim'):
                        token = Error
                    # mark PIM pragma as comment for other dialects
                    elif value.startswith('(*$') and \
                            self.dialect != 'unknown' and \
                            not self.dialect.startswith('m2pim'):
                        token = Comment.Multiline
            #
            else:  # token is neither Name nor Comment
                #
                # mark lexemes matching the dialect's error token set as errors
                if value in self.lexemes_to_reject:
                    token = Error
                #
                # substitute lexemes when in Algol mode
                if self.algol_publication_mode:
                    if value == '#':
                        value = '≠'
                    elif value == '<=':
                        value = '≤'
                    elif value == '>=':
                        value = '≥'
                    elif value == '==':
                        value = '≡'
                    elif value == '*.':
                        value = '•'

            # return result
            yield index, token, value
-
-    def analyse_text(text):
-        """It's Pascal-like, but does not use FUNCTION -- uses PROCEDURE
-        instead."""
-
-        # Check if this looks like Pascal, if not, bail out early
-        if not ('(*' in text and '*)' in text and ':=' in text):
-            return
-
-        result = 0
-        # Procedure is in Modula2
-        if re.search(r'\bPROCEDURE\b', text):
-            result += 0.6
-
-        # FUNCTION is only valid in Pascal, but not in Modula2
-        if re.search(r'\bFUNCTION\b', text):
-            result = 0.0
-
-        return result
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/monte.py b/venv/lib/python3.11/site-packages/pygments/lexers/monte.py
deleted file mode 100644
index 18f5a03..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/monte.py
+++ /dev/null
@@ -1,204 +0,0 @@
-"""
-    pygments.lexers.monte
-    ~~~~~~~~~~~~~~~~~~~~~
-
-    Lexer for the Monte programming language.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.token import Comment, Error, Keyword, Name, Number, Operator, \
-    Punctuation, String, Whitespace
-from pygments.lexer import RegexLexer, include, words
-
-__all__ = ['MonteLexer']
-
-
# Token word lists for the Monte lexer below.
# `var` handled separately
# `interface` handled separately
_declarations = ['bind', 'def', 'fn', 'object']
_methods = ['method', 'to']
_keywords = [
    'as', 'break', 'catch', 'continue', 'else', 'escape', 'exit', 'exports',
    'extends', 'finally', 'for', 'guards', 'if', 'implements', 'import',
    'in', 'match', 'meta', 'pass', 'return', 'switch', 'try', 'via', 'when',
    'while',
]
_operators = [
    # Unary
    '~', '!',
    # Binary
    '+', '-', '*', '/', '%', '**', '&', '|', '^', '<<', '>>',
    # Binary augmented
    '+=', '-=', '*=', '/=', '%=', '**=', '&=', '|=', '^=', '<<=', '>>=',
    # Comparison
    '==', '!=', '<', '<=', '>', '>=', '<=>',
    # Patterns and assignment
    ':=', '?', '=~', '!~', '=>',
    # Calls and sends
    '.', '<-', '->',
]
# Escape sequences accepted inside char/string/quasiliteral states:
# \xNN, \uNNNN, \UNNNNNNNN and the single-character escapes.
_escape_pattern = (
    r'(?:\\x[0-9a-fA-F]{2}|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|'
    r'\\["\'\\bftnr])')
# _char = _escape_chars + [('.', String.Char)]
_identifier = r'[_a-zA-Z]\w*'

_constants = [
    # Void constants
    'null',
    # Bool constants
    'false', 'true',
    # Double constants
    'Infinity', 'NaN',
    # Special objects
    'M', 'Ref', 'throw', 'traceln',
]

_guards = [
    'Any', 'Binding', 'Bool', 'Bytes', 'Char', 'DeepFrozen', 'Double',
    'Empty', 'Int', 'List', 'Map', 'Near', 'NullOk', 'Same', 'Selfless',
    'Set', 'Str', 'SubrangeGuard', 'Transparent', 'Void',
]

# Names available in Monte's safe scope, lexed as builtins.
_safeScope = [
    '_accumulateList', '_accumulateMap', '_auditedBy', '_bind',
    '_booleanFlow', '_comparer', '_equalizer', '_iterForever', '_loop',
    '_makeBytes', '_makeDouble', '_makeFinalSlot', '_makeInt', '_makeList',
    '_makeMap', '_makeMessageDesc', '_makeOrderedSpace', '_makeParamDesc',
    '_makeProtocolDesc', '_makeSourceSpan', '_makeString', '_makeVarSlot',
    '_makeVerbFacet', '_mapExtract', '_matchSame', '_quasiMatcher',
    '_slotToBinding', '_splitList', '_suchThat', '_switchFailed',
    '_validateFor', 'b__quasiParser', 'eval', 'import', 'm__quasiParser',
    'makeBrandPair', 'makeLazySlot', 'safeScope', 'simple__quasiParser',
]
-
-
class MonteLexer(RegexLexer):
    """
    Lexer for the Monte programming language.

    .. versionadded:: 2.2
    """
    name = 'Monte'
    url = 'https://monte.readthedocs.io/'
    aliases = ['monte']
    filenames = ['*.mt']

    tokens = {
        'root': [
            # Comments
            (r'#[^\n]*\n', Comment),

            # Docstrings
            # Apologies for the non-greedy matcher here.
            (r'/\*\*.*?\*/', String.Doc),

            # `var` declarations
            (r'\bvar\b', Keyword.Declaration, 'var'),

            # `interface` declarations
            (r'\binterface\b', Keyword.Declaration, 'interface'),

            # method declarations
            (words(_methods, prefix='\\b', suffix='\\b'),
             Keyword, 'method'),

            # All other declarations
            (words(_declarations, prefix='\\b', suffix='\\b'),
             Keyword.Declaration),

            # Keywords
            (words(_keywords, prefix='\\b', suffix='\\b'), Keyword),

            # Literals
            ('[+-]?0x[_0-9a-fA-F]+', Number.Hex),
            (r'[+-]?[_0-9]+\.[_0-9]*([eE][+-]?[_0-9]+)?', Number.Float),
            ('[+-]?[_0-9]+', Number.Integer),
            ("'", String.Double, 'char'),
            ('"', String.Double, 'string'),

            # Quasiliterals
            ('`', String.Backtick, 'ql'),

            # Operators
            (words(_operators), Operator),

            # Verb operators
            (_identifier + '=', Operator.Word),

            # Safe scope constants
            (words(_constants, prefix='\\b', suffix='\\b'),
             Keyword.Pseudo),

            # Safe scope guards
            (words(_guards, prefix='\\b', suffix='\\b'), Keyword.Type),

            # All other safe scope names
            (words(_safeScope, prefix='\\b', suffix='\\b'),
             Name.Builtin),

            # Identifiers
            (_identifier, Name),

            # Punctuation
            (r'\(|\)|\{|\}|\[|\]|:|,', Punctuation),

            # Whitespace
            (' +', Whitespace),

            # Definite lexer errors
            ('=', Error),
        ],
        'char': [
            # It is definitely an error to have a char of width == 0.
            # NOTE(review): this transition pushes a fresh 'root' state
            # rather than popping; the state stack grows -- verify this is
            # intentional before changing it.
            ("'", Error, 'root'),
            (_escape_pattern, String.Escape, 'charEnd'),
            ('.', String.Char, 'charEnd'),
        ],
        'charEnd': [
            # pop both 'charEnd' and 'char'
            ("'", String.Char, '#pop:2'),
            # It is definitely an error to have a char of width > 1.
            ('.', Error),
        ],
        # The state of things coming into an interface.
        'interface': [
            (' +', Whitespace),
            (_identifier, Name.Class, '#pop'),
            include('root'),
        ],
        # The state of things coming into a method.
        'method': [
            (' +', Whitespace),
            (_identifier, Name.Function, '#pop'),
            include('root'),
        ],
        'string': [
            # NOTE(review): closes by pushing 'root' instead of '#pop';
            # same stack-growth pattern as in 'char' above.
            ('"', String.Double, 'root'),
            (_escape_pattern, String.Escape),
            (r'\n', String.Double),
            ('.', String.Double),
        ],
        'ql': [
            # quasiliteral body; $ and @ introduce holes
            ('`', String.Backtick, 'root'),
            (r'\$' + _escape_pattern, String.Escape),
            (r'\$\$', String.Escape),
            (r'@@', String.Escape),
            (r'\$\{', String.Interpol, 'qlNest'),
            (r'@\{', String.Interpol, 'qlNest'),
            (r'\$' + _identifier, Name),
            ('@' + _identifier, Name),
            ('.', String.Backtick),
        ],
        'qlNest': [
            # nested expression inside ${...} or @{...}
            (r'\}', String.Interpol, '#pop'),
            include('root'),
        ],
        # The state of things immediately following `var`.
        'var': [
            (' +', Whitespace),
            (_identifier, Name.Variable, '#pop'),
            include('root'),
        ],
    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/mosel.py b/venv/lib/python3.11/site-packages/pygments/lexers/mosel.py
deleted file mode 100644
index f3c86cc..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/mosel.py
+++ /dev/null
@@ -1,447 +0,0 @@
-"""
-    pygments.lexers.mosel
-    ~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for the mosel language.
-    http://www.fico.com/en/products/fico-xpress-optimization
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation
-
-__all__ = ['MoselLexer']
-
# Builtin function names recognised by the Mosel lexer, grouped by the
# module that provides them.  Order is preserved from the reference
# listings below; the tuple is only used for membership in a words()
# regex, so duplicates across groups are harmless.
FUNCTIONS = (
    # core functions
    '_', 'abs', 'arctan', 'asproc', 'assert', 'bitflip', 'bitneg',
    'bitset', 'bitshift', 'bittest', 'bitval', 'ceil', 'cos', 'create',
    'currentdate', 'currenttime', 'cutelt', 'cutfirst', 'cuthead',
    'cutlast', 'cuttail', 'datablock', 'delcell', 'exists', 'exit', 'exp',
    'exportprob', 'fclose', 'fflush', 'finalize', 'findfirst', 'findlast',
    'floor', 'fopen', 'fselect', 'fskipline', 'fwrite', 'fwrite_',
    'fwriteln', 'fwriteln_', 'getact', 'getcoeff', 'getcoeffs', 'getdual',
    'getelt', 'getfid', 'getfirst', 'getfname', 'gethead', 'getlast',
    'getobjval', 'getparam', 'getrcost', 'getreadcnt', 'getreverse',
    'getsize', 'getslack', 'getsol', 'gettail', 'gettype', 'getvars',
    'isdynamic', 'iseof', 'isfinite', 'ishidden', 'isinf', 'isnan',
    'isodd', 'ln', 'localsetparam', 'log', 'makesos1', 'makesos2',
    'maxlist', 'memoryuse', 'minlist', 'newmuid', 'publish', 'random',
    'read', 'readln', 'reset', 'restoreparam', 'reverse', 'round',
    'setcoeff', 'sethidden', 'setioerr', 'setmatherr', 'setname',
    'setparam', 'setrandseed', 'setrange', 'settype', 'sin', 'splithead',
    'splittail', 'sqrt', 'strfmt', 'substr', 'timestamp', 'unpublish',
    'versionnum', 'versionstr', 'write', 'write_', 'writeln', 'writeln_',

    # mosel exam mmxprs | sed -n -e "s/ [pf][a-z]* \([a-zA-Z0-9_]*\).*/'\1',/p" | sort -u
    'addcut', 'addcuts', 'addmipsol', 'basisstability', 'calcsolinfo',
    'clearmipdir', 'clearmodcut', 'command', 'copysoltoinit',
    'crossoverlpsol', 'defdelayedrows', 'defsecurevecs', 'delcuts',
    'dropcuts', 'estimatemarginals', 'fixglobal', 'flushmsgq', 'getbstat',
    'getcnlist', 'getcplist', 'getdualray', 'getiis', 'getiissense',
    'getiistype', 'getinfcause', 'getinfeas', 'getlb', 'getlct',
    'getleft', 'getloadedlinctrs', 'getloadedmpvars', 'getname',
    'getprimalray', 'getprobstat', 'getrange', 'getright', 'getsensrng',
    'getsize', 'getsol', 'gettype', 'getub', 'getvars', 'gety',
    'hasfeature', 'implies', 'indicator', 'initglobal', 'ishidden',
    'isiisvalid', 'isintegral', 'loadbasis', 'loadcuts', 'loadlpsol',
    'loadmipsol', 'loadprob', 'maximise', 'maximize', 'minimise',
    'minimize', 'postsolve', 'readbasis', 'readdirs', 'readsol',
    'refinemipsol', 'rejectintsol', 'repairinfeas', 'repairinfeas_deprec',
    'resetbasis', 'resetiis', 'resetsol', 'savebasis', 'savemipsol',
    'savesol', 'savestate', 'selectsol', 'setarchconsistency', 'setbstat',
    'setcallback', 'setcbcutoff', 'setgndata', 'sethidden', 'setlb',
    'setmipdir', 'setmodcut', 'setsol', 'setub', 'setucbdata',
    'stopoptimise', 'stopoptimize', 'storecut', 'storecuts', 'unloadprob',
    'uselastbarsol', 'writebasis', 'writedirs', 'writeprob', 'writesol',
    'xor', 'xprs_addctr', 'xprs_addindic',

    # mosel exam mmsystem | sed -n -e "s/ [pf][a-z]* \([a-zA-Z0-9_]*\).*/'\1',/p" | sort -u
    'addmonths', 'copytext', 'cuttext', 'deltext', 'endswith', 'erase',
    'expandpath', 'fcopy', 'fdelete', 'findfiles', 'findtext', 'fmove',
    'formattext', 'getasnumber', 'getchar', 'getcwd', 'getdate', 'getday',
    'getdaynum', 'getdays', 'getdirsep', 'getdsoparam', 'getendparse',
    'getenv', 'getfsize', 'getfstat', 'getftime', 'gethour', 'getminute',
    'getmonth', 'getmsec', 'getoserrmsg', 'getoserror', 'getpathsep',
    'getqtype', 'getsecond', 'getsepchar', 'getsize', 'getstart',
    'getsucc', 'getsysinfo', 'getsysstat', 'gettime', 'gettmpdir',
    'gettrim', 'getweekday', 'getyear', 'inserttext', 'isvalid',
    'jointext', 'makedir', 'makepath', 'newtar', 'newzip', 'nextfield',
    'openpipe', 'parseextn', 'parseint', 'parsereal', 'parsetext',
    'pastetext', 'pathmatch', 'pathsplit', 'qsort', 'quote',
    'readtextline', 'regmatch', 'regreplace', 'removedir', 'removefiles',
    'setchar', 'setdate', 'setday', 'setdsoparam', 'setendparse',
    'setenv', 'sethour', 'setminute', 'setmonth', 'setmsec',
    'setoserror', 'setqtype', 'setsecond', 'setsepchar', 'setstart',
    'setsucc', 'settime', 'settrim', 'setyear', 'sleep', 'splittext',
    'startswith', 'system', 'tarlist', 'textfmt', 'tolower', 'toupper',
    'trim', 'untar', 'unzip', 'ziplist',

    # mosel exam mmjobs | sed -n -e "s/ [pf][a-z]* \([a-zA-Z0-9_]*\).*/'\1',/p" | sort -u
    'canceltimer', 'clearaliases', 'compile', 'connect', 'detach',
    'disconnect', 'dropnextevent', 'findxsrvs', 'getaliases',
    'getannidents', 'getannotations', 'getbanner', 'getclass',
    'getdsoprop', 'getdsopropnum', 'getexitcode', 'getfromgid',
    'getfromid', 'getfromuid', 'getgid', 'gethostalias', 'getid',
    'getmodprop', 'getmodpropnum', 'getnextevent', 'getnode', 'getrmtid',
    'getstatus', 'getsysinfo', 'gettimer', 'getuid', 'getvalue',
    'isqueueempty', 'load', 'nullevent', 'peeknextevent', 'resetmodpar',
    'run', 'send', 'setcontrol', 'setdefstream', 'setgid', 'sethostalias',
    'setmodpar', 'settimer', 'setuid', 'setworkdir', 'stop', 'unload',
    'wait', 'waitexpired', 'waitfor', 'waitforend',
)
-
-
class MoselLexer(RegexLexer):
    """
    For the Mosel optimization language.

    .. versionadded:: 2.6
    """
    name = 'Mosel'
    aliases = ['mosel']
    filenames = ['*.mos']

    tokens = {
        'root': [
            (r'\n', Text),
            (r'\s+', Text.Whitespace),
            # '!' starts a line comment; '(! ... !)' is a block comment
            (r'!.*?\n', Comment.Single),
            (r'\(!(.|\n)*?!\)', Comment.Multiline),
            # language keywords
            (words((
                'and', 'as', 'break', 'case', 'count', 'declarations', 'do',
                'dynamic', 'elif', 'else', 'end-', 'end', 'evaluation', 'false',
                'forall', 'forward', 'from', 'function', 'hashmap', 'if',
                'imports', 'include', 'initialisations', 'initializations', 'inter',
                'max', 'min', 'model', 'namespace', 'next', 'not', 'nsgroup',
                'nssearch', 'of', 'options', 'or', 'package', 'parameters',
                'procedure', 'public', 'prod', 'record', 'repeat', 'requirements',
                'return', 'sum', 'then', 'to', 'true', 'union', 'until', 'uses',
                'version', 'while', 'with'), prefix=r'\b', suffix=r'\b'),
             Keyword.Builtin),
            # built-in type names
            (words((
                'range', 'array', 'set', 'list', 'mpvar', 'mpproblem', 'linctr',
                'nlctr', 'integer', 'string', 'real', 'boolean', 'text', 'time',
                'date', 'datetime', 'returned', 'Model', 'Mosel', 'counter',
                'xmldoc', 'is_sos1', 'is_sos2', 'is_integer', 'is_binary',
                'is_continuous', 'is_free', 'is_semcont', 'is_semint',
                'is_partint'), prefix=r'\b', suffix=r'\b'),
             Keyword.Type),
            # operators, including the word operators in/mod/div
            (r'(\+|\-|\*|/|=|<=|>=|\||\^|<|>|<>|\.\.|\.|:=|::|:|in|mod|div)',
             Operator),
            (r'[()\[\]{},;]+', Punctuation),
            # builtin/module function names (see FUNCTIONS above)
            (words(FUNCTIONS,  prefix=r'\b', suffix=r'\b'), Name.Function),
            # numeric literals: floats, integers, infinity, hex
            (r'(\d+\.(?!\.)\d*|\.(?!.)\d+)([eE][+-]?\d+)?', Number.Float),
            (r'\d+([eE][+-]?\d+)?', Number.Integer),
            (r'[+-]?Infinity', Number.Integer),
            (r'0[xX][0-9a-fA-F]+', Number),
            (r'"', String.Double, 'double_quote'),
            (r'\'', String.Single, 'single_quote'),
            # anything else word-like falls through as plain text
            (r'(\w+|(\.(?!\.)))', Text),
        ],
        'single_quote': [
            # single-quoted strings have no escape sequences
            (r'\'', String.Single, '#pop'),
            (r'[^\']+', String.Single),
        ],
        'double_quote': [
            # double-quoted strings support C-style escapes
            (r'(\\"|\\[0-7]{1,3}\D|\\[abfnrtv]|\\\\)', String.Escape),
            (r'\"', String.Double, '#pop'),
            (r'[^"\\]+', String.Double),
        ],
    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/ncl.py b/venv/lib/python3.11/site-packages/pygments/lexers/ncl.py
deleted file mode 100644
index b1ec145..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/ncl.py
+++ /dev/null
@@ -1,893 +0,0 @@
-"""
-    pygments.lexers.ncl
-    ~~~~~~~~~~~~~~~~~~~
-
-    Lexers for NCAR Command Language.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation
-
-__all__ = ['NCLLexer']
-
-
-class NCLLexer(RegexLexer):
-    """
-    Lexer for NCL code.
-
-    .. versionadded:: 2.2
-    """
-    name = 'NCL'
-    aliases = ['ncl']
-    filenames = ['*.ncl']
-    mimetypes = ['text/ncl']
-    flags = re.MULTILINE
-
-    tokens = {
-        'root': [
-            (r';.*\n', Comment),
-            include('strings'),
-            include('core'),
-            (r'[a-zA-Z_]\w*', Name),
-            include('nums'),
-            (r'[\s]+', Text),
-        ],
-        'core': [
-            # Statements
-            (words((
-                'begin', 'break', 'continue', 'create', 'defaultapp', 'do',
-                'else', 'end', 'external', 'exit', 'True', 'False', 'file', 'function',
-                'getvalues', 'graphic', 'group', 'if', 'list', 'load', 'local',
-                'new', '_Missing', 'Missing', 'noparent', 'procedure',
-                'quit', 'QUIT', 'Quit', 'record', 'return', 'setvalues', 'stop',
-                'then', 'while'), prefix=r'\b', suffix=r'\s*\b'),
-             Keyword),
-
-            # Data Types
-            (words((
-                'ubyte', 'uint', 'uint64', 'ulong', 'string', 'byte',
-                'character', 'double', 'float', 'integer', 'int64', 'logical',
-                'long', 'short', 'ushort', 'enumeric', 'numeric', 'snumeric'),
-                prefix=r'\b', suffix=r'\s*\b'),
-             Keyword.Type),
-
-            # Operators
-            (r'[\%^*+\-/<>]', Operator),
-
-            # punctuation:
-            (r'[\[\]():@$!&|.,\\{}]', Punctuation),
-            (r'[=:]', Punctuation),
-
-            # Intrinsics
-            (words((
-                'abs', 'acos', 'addfile', 'addfiles', 'all', 'angmom_atm', 'any',
-                'area_conserve_remap', 'area_hi2lores', 'area_poly_sphere',
-                'asciiread', 'asciiwrite', 'asin', 'atan', 'atan2', 'attsetvalues',
-                'avg', 'betainc', 'bin_avg', 'bin_sum', 'bw_bandpass_filter',
-                'cancor', 'cbinread', 'cbinwrite', 'cd_calendar', 'cd_inv_calendar',
-                'cdfbin_p', 'cdfbin_pr', 'cdfbin_s', 'cdfbin_xn', 'cdfchi_p',
-                'cdfchi_x', 'cdfgam_p', 'cdfgam_x', 'cdfnor_p', 'cdfnor_x',
-                'cdft_p', 'cdft_t', 'ceil', 'center_finite_diff',
-                'center_finite_diff_n', 'cfftb', 'cfftf', 'cfftf_frq_reorder',
-                'charactertodouble', 'charactertofloat', 'charactertointeger',
-                'charactertolong', 'charactertoshort', 'charactertostring',
-                'chartodouble', 'chartofloat', 'chartoint', 'chartointeger',
-                'chartolong', 'chartoshort', 'chartostring', 'chiinv', 'clear',
-                'color_index_to_rgba', 'conform', 'conform_dims', 'cos', 'cosh',
-                'count_unique_values', 'covcorm', 'covcorm_xy', 'craybinnumrec',
-                'craybinrecread', 'create_graphic', 'csa1', 'csa1d', 'csa1s',
-                'csa1x', 'csa1xd', 'csa1xs', 'csa2', 'csa2d', 'csa2l', 'csa2ld',
-                'csa2ls', 'csa2lx', 'csa2lxd', 'csa2lxs', 'csa2s', 'csa2x',
-                'csa2xd', 'csa2xs', 'csa3', 'csa3d', 'csa3l', 'csa3ld', 'csa3ls',
-                'csa3lx', 'csa3lxd', 'csa3lxs', 'csa3s', 'csa3x', 'csa3xd',
-                'csa3xs', 'csc2s', 'csgetp', 'css2c', 'cssetp', 'cssgrid', 'csstri',
-                'csvoro', 'cumsum', 'cz2ccm', 'datatondc', 'day_of_week',
-                'day_of_year', 'days_in_month', 'default_fillvalue', 'delete',
-                'depth_to_pres', 'destroy', 'determinant', 'dewtemp_trh',
-                'dgeevx_lapack', 'dim_acumrun_n', 'dim_avg', 'dim_avg_n',
-                'dim_avg_wgt', 'dim_avg_wgt_n', 'dim_cumsum', 'dim_cumsum_n',
-                'dim_gamfit_n', 'dim_gbits', 'dim_max', 'dim_max_n', 'dim_median',
-                'dim_median_n', 'dim_min', 'dim_min_n', 'dim_num', 'dim_num_n',
-                'dim_numrun_n', 'dim_pqsort', 'dim_pqsort_n', 'dim_product',
-                'dim_product_n', 'dim_rmsd', 'dim_rmsd_n', 'dim_rmvmean',
-                'dim_rmvmean_n', 'dim_rmvmed', 'dim_rmvmed_n', 'dim_spi_n',
-                'dim_standardize', 'dim_standardize_n', 'dim_stat4', 'dim_stat4_n',
-                'dim_stddev', 'dim_stddev_n', 'dim_sum', 'dim_sum_n', 'dim_sum_wgt',
-                'dim_sum_wgt_n', 'dim_variance', 'dim_variance_n', 'dimsizes',
-                'doubletobyte', 'doubletochar', 'doubletocharacter',
-                'doubletofloat', 'doubletoint', 'doubletointeger', 'doubletolong',
-                'doubletoshort', 'dpres_hybrid_ccm', 'dpres_plevel', 'draw',
-                'draw_color_palette', 'dsgetp', 'dsgrid2', 'dsgrid2d', 'dsgrid2s',
-                'dsgrid3', 'dsgrid3d', 'dsgrid3s', 'dspnt2', 'dspnt2d', 'dspnt2s',
-                'dspnt3', 'dspnt3d', 'dspnt3s', 'dssetp', 'dtrend', 'dtrend_msg',
-                'dtrend_msg_n', 'dtrend_n', 'dtrend_quadratic',
-                'dtrend_quadratic_msg_n', 'dv2uvf', 'dv2uvg', 'dz_height',
-                'echo_off', 'echo_on', 'eof2data', 'eof_varimax', 'eofcor',
-                'eofcor_pcmsg', 'eofcor_ts', 'eofcov', 'eofcov_pcmsg', 'eofcov_ts',
-                'eofunc', 'eofunc_ts', 'eofunc_varimax', 'equiv_sample_size', 'erf',
-                'erfc', 'esacr', 'esacv', 'esccr', 'esccv', 'escorc', 'escorc_n',
-                'escovc', 'exit', 'exp', 'exp_tapersh', 'exp_tapersh_wgts',
-                'exp_tapershC', 'ezfftb', 'ezfftb_n', 'ezfftf', 'ezfftf_n',
-                'f2fosh', 'f2foshv', 'f2fsh', 'f2fshv', 'f2gsh', 'f2gshv', 'fabs',
-                'fbindirread', 'fbindirwrite', 'fbinnumrec', 'fbinread',
-                'fbinrecread', 'fbinrecwrite', 'fbinwrite', 'fft2db', 'fft2df',
-                'fftshift', 'fileattdef', 'filechunkdimdef', 'filedimdef',
-                'fileexists', 'filegrpdef', 'filevarattdef', 'filevarchunkdef',
-                'filevarcompressleveldef', 'filevardef', 'filevardimsizes',
-                'filwgts_lancos', 'filwgts_lanczos', 'filwgts_normal',
-                'floattobyte', 'floattochar', 'floattocharacter', 'floattoint',
-                'floattointeger', 'floattolong', 'floattoshort', 'floor',
-                'fluxEddy', 'fo2fsh', 'fo2fshv', 'fourier_info', 'frame', 'fspan',
-                'ftcurv', 'ftcurvd', 'ftcurvi', 'ftcurvp', 'ftcurvpi', 'ftcurvps',
-                'ftcurvs', 'ftest', 'ftgetp', 'ftkurv', 'ftkurvd', 'ftkurvp',
-                'ftkurvpd', 'ftsetp', 'ftsurf', 'g2fsh', 'g2fshv', 'g2gsh',
-                'g2gshv', 'gamma', 'gammainc', 'gaus', 'gaus_lobat',
-                'gaus_lobat_wgt', 'gc_aangle', 'gc_clkwise', 'gc_dangle',
-                'gc_inout', 'gc_latlon', 'gc_onarc', 'gc_pnt2gc', 'gc_qarea',
-                'gc_tarea', 'generate_2d_array', 'get_color_index',
-                'get_color_rgba', 'get_cpu_time', 'get_isolines', 'get_ncl_version',
-                'get_script_name', 'get_script_prefix_name', 'get_sphere_radius',
-                'get_unique_values', 'getbitsone', 'getenv', 'getfiledimsizes',
-                'getfilegrpnames', 'getfilepath', 'getfilevaratts',
-                'getfilevarchunkdimsizes', 'getfilevardims', 'getfilevardimsizes',
-                'getfilevarnames', 'getfilevartypes', 'getvaratts', 'getvardims',
-                'gradsf', 'gradsg', 'greg2jul', 'grid2triple', 'hlsrgb', 'hsvrgb',
-                'hydro', 'hyi2hyo', 'idsfft', 'igradsf', 'igradsg', 'ilapsf',
-                'ilapsg', 'ilapvf', 'ilapvg', 'ind', 'ind_resolve', 'int2p',
-                'int2p_n', 'integertobyte', 'integertochar', 'integertocharacter',
-                'integertoshort', 'inttobyte', 'inttochar', 'inttoshort',
-                'inverse_matrix', 'isatt', 'isbigendian', 'isbyte', 'ischar',
-                'iscoord', 'isdefined', 'isdim', 'isdimnamed', 'isdouble',
-                'isenumeric', 'isfile', 'isfilepresent', 'isfilevar',
-                'isfilevaratt', 'isfilevarcoord', 'isfilevardim', 'isfloat',
-                'isfunc', 'isgraphic', 'isint', 'isint64', 'isinteger',
-                'isleapyear', 'islogical', 'islong', 'ismissing', 'isnan_ieee',
-                'isnumeric', 'ispan', 'isproc', 'isshort', 'issnumeric', 'isstring',
-                'isubyte', 'isuint', 'isuint64', 'isulong', 'isunlimited',
-                'isunsigned', 'isushort', 'isvar', 'jul2greg', 'kmeans_as136',
-                'kolsm2_n', 'kron_product', 'lapsf', 'lapsg', 'lapvf', 'lapvg',
-                'latlon2utm', 'lclvl', 'lderuvf', 'lderuvg', 'linint1', 'linint1_n',
-                'linint2', 'linint2_points', 'linmsg', 'linmsg_n', 'linrood_latwgt',
-                'linrood_wgt', 'list_files', 'list_filevars', 'list_hlus',
-                'list_procfuncs', 'list_vars', 'ListAppend', 'ListCount',
-                'ListGetType', 'ListIndex', 'ListIndexFromName', 'ListPop',
-                'ListPush', 'ListSetType', 'loadscript', 'local_max', 'local_min',
-                'log', 'log10', 'longtobyte', 'longtochar', 'longtocharacter',
-                'longtoint', 'longtointeger', 'longtoshort', 'lspoly', 'lspoly_n',
-                'mask', 'max', 'maxind', 'min', 'minind', 'mixed_layer_depth',
-                'mixhum_ptd', 'mixhum_ptrh', 'mjo_cross_coh2pha',
-                'mjo_cross_segment', 'moc_globe_atl', 'monthday', 'natgrid',
-                'natgridd', 'natgrids', 'ncargpath', 'ncargversion', 'ndctodata',
-                'ndtooned', 'new', 'NewList', 'ngezlogo', 'nggcog', 'nggetp',
-                'nglogo', 'ngsetp', 'NhlAddAnnotation', 'NhlAddData',
-                'NhlAddOverlay', 'NhlAddPrimitive', 'NhlAppGetDefaultParentId',
-                'NhlChangeWorkstation', 'NhlClassName', 'NhlClearWorkstation',
-                'NhlDataPolygon', 'NhlDataPolyline', 'NhlDataPolymarker',
-                'NhlDataToNDC', 'NhlDestroy', 'NhlDraw', 'NhlFrame', 'NhlFreeColor',
-                'NhlGetBB', 'NhlGetClassResources', 'NhlGetErrorObjectId',
-                'NhlGetNamedColorIndex', 'NhlGetParentId',
-                'NhlGetParentWorkstation', 'NhlGetWorkspaceObjectId',
-                'NhlIsAllocatedColor', 'NhlIsApp', 'NhlIsDataComm', 'NhlIsDataItem',
-                'NhlIsDataSpec', 'NhlIsTransform', 'NhlIsView', 'NhlIsWorkstation',
-                'NhlName', 'NhlNDCPolygon', 'NhlNDCPolyline', 'NhlNDCPolymarker',
-                'NhlNDCToData', 'NhlNewColor', 'NhlNewDashPattern', 'NhlNewMarker',
-                'NhlPalGetDefined', 'NhlRemoveAnnotation', 'NhlRemoveData',
-                'NhlRemoveOverlay', 'NhlRemovePrimitive', 'NhlSetColor',
-                'NhlSetDashPattern', 'NhlSetMarker', 'NhlUpdateData',
-                'NhlUpdateWorkstation', 'nice_mnmxintvl', 'nngetaspectd',
-                'nngetaspects', 'nngetp', 'nngetsloped', 'nngetslopes', 'nngetwts',
-                'nngetwtsd', 'nnpnt', 'nnpntd', 'nnpntend', 'nnpntendd',
-                'nnpntinit', 'nnpntinitd', 'nnpntinits', 'nnpnts', 'nnsetp', 'num',
-                'obj_anal_ic', 'omega_ccm', 'onedtond', 'overlay', 'paleo_outline',
-                'pdfxy_bin', 'poisson_grid_fill', 'pop_remap', 'potmp_insitu_ocn',
-                'prcwater_dp', 'pres2hybrid', 'pres_hybrid_ccm', 'pres_sigma',
-                'print', 'print_table', 'printFileVarSummary', 'printVarSummary',
-                'product', 'pslec', 'pslhor', 'pslhyp', 'qsort', 'rand',
-                'random_chi', 'random_gamma', 'random_normal', 'random_setallseed',
-                'random_uniform', 'rcm2points', 'rcm2rgrid', 'rdsstoi',
-                'read_colormap_file', 'reg_multlin', 'regcoef', 'regCoef_n',
-                'regline', 'relhum', 'replace_ieeenan', 'reshape', 'reshape_ind',
-                'rgba_to_color_index', 'rgbhls', 'rgbhsv', 'rgbyiq', 'rgrid2rcm',
-                'rhomb_trunc', 'rip_cape_2d', 'rip_cape_3d', 'round', 'rtest',
-                'runave', 'runave_n', 'set_default_fillvalue', 'set_sphere_radius',
-                'setfileoption', 'sfvp2uvf', 'sfvp2uvg', 'shaec', 'shagc',
-                'shgetnp', 'shgetp', 'shgrid', 'shorttobyte', 'shorttochar',
-                'shorttocharacter', 'show_ascii', 'shsec', 'shsetp', 'shsgc',
-                'shsgc_R42', 'sigma2hybrid', 'simpeq', 'simpne', 'sin',
-                'sindex_yrmo', 'sinh', 'sizeof', 'sleep', 'smth9', 'snindex_yrmo',
-                'solve_linsys', 'span_color_indexes', 'span_color_rgba',
-                'sparse_matrix_mult', 'spcorr', 'spcorr_n', 'specx_anal',
-                'specxy_anal', 'spei', 'sprintf', 'sprinti', 'sqrt', 'sqsort',
-                'srand', 'stat2', 'stat4', 'stat_medrng', 'stat_trim',
-                'status_exit', 'stdatmus_p2tdz', 'stdatmus_z2tdp', 'stddev',
-                'str_capital', 'str_concat', 'str_fields_count', 'str_get_cols',
-                'str_get_dq', 'str_get_field', 'str_get_nl', 'str_get_sq',
-                'str_get_tab', 'str_index_of_substr', 'str_insert', 'str_is_blank',
-                'str_join', 'str_left_strip', 'str_lower', 'str_match',
-                'str_match_ic', 'str_match_ic_regex', 'str_match_ind',
-                'str_match_ind_ic', 'str_match_ind_ic_regex', 'str_match_ind_regex',
-                'str_match_regex', 'str_right_strip', 'str_split',
-                'str_split_by_length', 'str_split_csv', 'str_squeeze', 'str_strip',
-                'str_sub_str', 'str_switch', 'str_upper', 'stringtochar',
-                'stringtocharacter', 'stringtodouble', 'stringtofloat',
-                'stringtoint', 'stringtointeger', 'stringtolong', 'stringtoshort',
-                'strlen', 'student_t', 'sum', 'svd_lapack', 'svdcov', 'svdcov_sv',
-                'svdstd', 'svdstd_sv', 'system', 'systemfunc', 'tan', 'tanh',
-                'taper', 'taper_n', 'tdclrs', 'tdctri', 'tdcudp', 'tdcurv',
-                'tddtri', 'tdez2d', 'tdez3d', 'tdgetp', 'tdgrds', 'tdgrid',
-                'tdgtrs', 'tdinit', 'tditri', 'tdlbla', 'tdlblp', 'tdlbls',
-                'tdline', 'tdlndp', 'tdlnpa', 'tdlpdp', 'tdmtri', 'tdotri',
-                'tdpara', 'tdplch', 'tdprpa', 'tdprpi', 'tdprpt', 'tdsetp',
-                'tdsort', 'tdstri', 'tdstrs', 'tdttri', 'thornthwaite', 'tobyte',
-                'tochar', 'todouble', 'tofloat', 'toint', 'toint64', 'tointeger',
-                'tolong', 'toshort', 'tosigned', 'tostring', 'tostring_with_format',
-                'totype', 'toubyte', 'touint', 'touint64', 'toulong', 'tounsigned',
-                'toushort', 'trend_manken', 'tri_trunc', 'triple2grid',
-                'triple2grid2d', 'trop_wmo', 'ttest', 'typeof', 'undef',
-                'unique_string', 'update', 'ushorttoint', 'ut_calendar',
-                'ut_inv_calendar', 'utm2latlon', 'uv2dv_cfd', 'uv2dvf', 'uv2dvg',
-                'uv2sfvpf', 'uv2sfvpg', 'uv2vr_cfd', 'uv2vrdvf', 'uv2vrdvg',
-                'uv2vrf', 'uv2vrg', 'v5d_close', 'v5d_create', 'v5d_setLowLev',
-                'v5d_setUnits', 'v5d_write', 'v5d_write_var', 'variance', 'vhaec',
-                'vhagc', 'vhsec', 'vhsgc', 'vibeta', 'vinth2p', 'vinth2p_ecmwf',
-                'vinth2p_ecmwf_nodes', 'vinth2p_nodes', 'vintp2p_ecmwf', 'vr2uvf',
-                'vr2uvg', 'vrdv2uvf', 'vrdv2uvg', 'wavelet', 'wavelet_default',
-                'weibull', 'wgt_area_smooth', 'wgt_areaave', 'wgt_areaave2',
-                'wgt_arearmse', 'wgt_arearmse2', 'wgt_areasum2', 'wgt_runave',
-                'wgt_runave_n', 'wgt_vert_avg_beta', 'wgt_volave', 'wgt_volave_ccm',
-                'wgt_volrmse', 'wgt_volrmse_ccm', 'where', 'wk_smooth121', 'wmbarb',
-                'wmbarbmap', 'wmdrft', 'wmgetp', 'wmlabs', 'wmsetp', 'wmstnm',
-                'wmvect', 'wmvectmap', 'wmvlbl', 'wrf_avo', 'wrf_cape_2d',
-                'wrf_cape_3d', 'wrf_dbz', 'wrf_eth', 'wrf_helicity', 'wrf_ij_to_ll',
-                'wrf_interp_1d', 'wrf_interp_2d_xy', 'wrf_interp_3d_z',
-                'wrf_latlon_to_ij', 'wrf_ll_to_ij', 'wrf_omega', 'wrf_pvo',
-                'wrf_rh', 'wrf_slp', 'wrf_smooth_2d', 'wrf_td', 'wrf_tk',
-                'wrf_updraft_helicity', 'wrf_uvmet', 'wrf_virtual_temp',
-                'wrf_wetbulb', 'wrf_wps_close_int', 'wrf_wps_open_int',
-                'wrf_wps_rddata_int', 'wrf_wps_rdhead_int', 'wrf_wps_read_int',
-                'wrf_wps_write_int', 'write_matrix', 'write_table', 'yiqrgb',
-                'z2geouv', 'zonal_mpsi', 'addfiles_GetVar', 'advect_variable',
-                'area_conserve_remap_Wrap', 'area_hi2lores_Wrap',
-                'array_append_record', 'assignFillValue', 'byte2flt',
-                'byte2flt_hdf', 'calcDayAnomTLL', 'calcMonAnomLLLT',
-                'calcMonAnomLLT', 'calcMonAnomTLL', 'calcMonAnomTLLL',
-                'calculate_monthly_values', 'cd_convert', 'changeCase',
-                'changeCaseChar', 'clmDayTLL', 'clmDayTLLL', 'clmMon2clmDay',
-                'clmMonLLLT', 'clmMonLLT', 'clmMonTLL', 'clmMonTLLL', 'closest_val',
-                'copy_VarAtts', 'copy_VarCoords', 'copy_VarCoords_1',
-                'copy_VarCoords_2', 'copy_VarMeta', 'copyatt', 'crossp3',
-                'cshstringtolist', 'cssgrid_Wrap', 'dble2flt', 'decimalPlaces',
-                'delete_VarAtts', 'dim_avg_n_Wrap', 'dim_avg_wgt_n_Wrap',
-                'dim_avg_wgt_Wrap', 'dim_avg_Wrap', 'dim_cumsum_n_Wrap',
-                'dim_cumsum_Wrap', 'dim_max_n_Wrap', 'dim_min_n_Wrap',
-                'dim_rmsd_n_Wrap', 'dim_rmsd_Wrap', 'dim_rmvmean_n_Wrap',
-                'dim_rmvmean_Wrap', 'dim_rmvmed_n_Wrap', 'dim_rmvmed_Wrap',
-                'dim_standardize_n_Wrap', 'dim_standardize_Wrap',
-                'dim_stddev_n_Wrap', 'dim_stddev_Wrap', 'dim_sum_n_Wrap',
-                'dim_sum_wgt_n_Wrap', 'dim_sum_wgt_Wrap', 'dim_sum_Wrap',
-                'dim_variance_n_Wrap', 'dim_variance_Wrap', 'dpres_plevel_Wrap',
-                'dtrend_leftdim', 'dv2uvF_Wrap', 'dv2uvG_Wrap', 'eof_north',
-                'eofcor_Wrap', 'eofcov_Wrap', 'eofunc_north', 'eofunc_ts_Wrap',
-                'eofunc_varimax_reorder', 'eofunc_varimax_Wrap', 'eofunc_Wrap',
-                'epsZero', 'f2fosh_Wrap', 'f2foshv_Wrap', 'f2fsh_Wrap',
-                'f2fshv_Wrap', 'f2gsh_Wrap', 'f2gshv_Wrap', 'fbindirSwap',
-                'fbinseqSwap1', 'fbinseqSwap2', 'flt2dble', 'flt2string',
-                'fo2fsh_Wrap', 'fo2fshv_Wrap', 'g2fsh_Wrap', 'g2fshv_Wrap',
-                'g2gsh_Wrap', 'g2gshv_Wrap', 'generate_resample_indices',
-                'generate_sample_indices', 'generate_unique_indices',
-                'genNormalDist', 'get1Dindex', 'get1Dindex_Collapse',
-                'get1Dindex_Exclude', 'get_file_suffix', 'GetFillColor',
-                'GetFillColorIndex', 'getFillValue', 'getind_latlon2d',
-                'getVarDimNames', 'getVarFillValue', 'grib_stime2itime',
-                'hyi2hyo_Wrap', 'ilapsF_Wrap', 'ilapsG_Wrap', 'ind_nearest_coord',
-                'indStrSubset', 'int2dble', 'int2flt', 'int2p_n_Wrap', 'int2p_Wrap',
-                'isMonotonic', 'isStrSubset', 'latGau', 'latGauWgt', 'latGlobeF',
-                'latGlobeFo', 'latRegWgt', 'linint1_n_Wrap', 'linint1_Wrap',
-                'linint2_points_Wrap', 'linint2_Wrap', 'local_max_1d',
-                'local_min_1d', 'lonFlip', 'lonGlobeF', 'lonGlobeFo', 'lonPivot',
-                'merge_levels_sfc', 'mod', 'month_to_annual',
-                'month_to_annual_weighted', 'month_to_season', 'month_to_season12',
-                'month_to_seasonN', 'monthly_total_to_daily_mean', 'nameDim',
-                'natgrid_Wrap', 'NewCosWeight', 'niceLatLon2D', 'NormCosWgtGlobe',
-                'numAsciiCol', 'numAsciiRow', 'numeric2int',
-                'obj_anal_ic_deprecated', 'obj_anal_ic_Wrap', 'omega_ccm_driver',
-                'omega_to_w', 'oneDtostring', 'pack_values', 'pattern_cor', 'pdfx',
-                'pdfxy', 'pdfxy_conform', 'pot_temp', 'pot_vort_hybrid',
-                'pot_vort_isobaric', 'pres2hybrid_Wrap', 'print_clock',
-                'printMinMax', 'quadroots', 'rcm2points_Wrap', 'rcm2rgrid_Wrap',
-                'readAsciiHead', 'readAsciiTable', 'reg_multlin_stats',
-                'region_ind', 'regline_stats', 'relhum_ttd', 'replaceSingleChar',
-                'RGBtoCmap', 'rgrid2rcm_Wrap', 'rho_mwjf', 'rm_single_dims',
-                'rmAnnCycle1D', 'rmInsufData', 'rmMonAnnCycLLLT', 'rmMonAnnCycLLT',
-                'rmMonAnnCycTLL', 'runave_n_Wrap', 'runave_Wrap', 'short2flt',
-                'short2flt_hdf', 'shsgc_R42_Wrap', 'sign_f90', 'sign_matlab',
-                'smth9_Wrap', 'smthClmDayTLL', 'smthClmDayTLLL', 'SqrtCosWeight',
-                'stat_dispersion', 'static_stability', 'stdMonLLLT', 'stdMonLLT',
-                'stdMonTLL', 'stdMonTLLL', 'symMinMaxPlt', 'table_attach_columns',
-                'table_attach_rows', 'time_to_newtime', 'transpose',
-                'triple2grid_Wrap', 'ut_convert', 'uv2dvF_Wrap', 'uv2dvG_Wrap',
-                'uv2vrF_Wrap', 'uv2vrG_Wrap', 'vr2uvF_Wrap', 'vr2uvG_Wrap',
-                'w_to_omega', 'wallClockElapseTime', 'wave_number_spc',
-                'wgt_areaave_Wrap', 'wgt_runave_leftdim', 'wgt_runave_n_Wrap',
-                'wgt_runave_Wrap', 'wgt_vertical_n', 'wind_component',
-                'wind_direction', 'yyyyddd_to_yyyymmdd', 'yyyymm_time',
-                'yyyymm_to_yyyyfrac', 'yyyymmdd_time', 'yyyymmdd_to_yyyyddd',
-                'yyyymmdd_to_yyyyfrac', 'yyyymmddhh_time', 'yyyymmddhh_to_yyyyfrac',
-                'zonal_mpsi_Wrap', 'zonalAve', 'calendar_decode2', 'cd_string',
-                'kf_filter', 'run_cor', 'time_axis_labels', 'ut_string',
-                'wrf_contour', 'wrf_map', 'wrf_map_overlay', 'wrf_map_overlays',
-                'wrf_map_resources', 'wrf_map_zoom', 'wrf_overlay', 'wrf_overlays',
-                'wrf_user_getvar', 'wrf_user_ij_to_ll', 'wrf_user_intrp2d',
-                'wrf_user_intrp3d', 'wrf_user_latlon_to_ij', 'wrf_user_list_times',
-                'wrf_user_ll_to_ij', 'wrf_user_unstagger', 'wrf_user_vert_interp',
-                'wrf_vector', 'gsn_add_annotation', 'gsn_add_polygon',
-                'gsn_add_polyline', 'gsn_add_polymarker',
-                'gsn_add_shapefile_polygons', 'gsn_add_shapefile_polylines',
-                'gsn_add_shapefile_polymarkers', 'gsn_add_text', 'gsn_attach_plots',
-                'gsn_blank_plot', 'gsn_contour', 'gsn_contour_map',
-                'gsn_contour_shade', 'gsn_coordinates', 'gsn_create_labelbar',
-                'gsn_create_legend', 'gsn_create_text',
-                'gsn_csm_attach_zonal_means', 'gsn_csm_blank_plot',
-                'gsn_csm_contour', 'gsn_csm_contour_map', 'gsn_csm_contour_map_ce',
-                'gsn_csm_contour_map_overlay', 'gsn_csm_contour_map_polar',
-                'gsn_csm_hov', 'gsn_csm_lat_time', 'gsn_csm_map', 'gsn_csm_map_ce',
-                'gsn_csm_map_polar', 'gsn_csm_pres_hgt',
-                'gsn_csm_pres_hgt_streamline', 'gsn_csm_pres_hgt_vector',
-                'gsn_csm_streamline', 'gsn_csm_streamline_contour_map',
-                'gsn_csm_streamline_contour_map_ce',
-                'gsn_csm_streamline_contour_map_polar', 'gsn_csm_streamline_map',
-                'gsn_csm_streamline_map_ce', 'gsn_csm_streamline_map_polar',
-                'gsn_csm_streamline_scalar', 'gsn_csm_streamline_scalar_map',
-                'gsn_csm_streamline_scalar_map_ce',
-                'gsn_csm_streamline_scalar_map_polar', 'gsn_csm_time_lat',
-                'gsn_csm_vector', 'gsn_csm_vector_map', 'gsn_csm_vector_map_ce',
-                'gsn_csm_vector_map_polar', 'gsn_csm_vector_scalar',
-                'gsn_csm_vector_scalar_map', 'gsn_csm_vector_scalar_map_ce',
-                'gsn_csm_vector_scalar_map_polar', 'gsn_csm_x2y', 'gsn_csm_x2y2',
-                'gsn_csm_xy', 'gsn_csm_xy2', 'gsn_csm_xy3', 'gsn_csm_y',
-                'gsn_define_colormap', 'gsn_draw_colormap', 'gsn_draw_named_colors',
-                'gsn_histogram', 'gsn_labelbar_ndc', 'gsn_legend_ndc', 'gsn_map',
-                'gsn_merge_colormaps', 'gsn_open_wks', 'gsn_panel', 'gsn_polygon',
-                'gsn_polygon_ndc', 'gsn_polyline', 'gsn_polyline_ndc',
-                'gsn_polymarker', 'gsn_polymarker_ndc', 'gsn_retrieve_colormap',
-                'gsn_reverse_colormap', 'gsn_streamline', 'gsn_streamline_map',
-                'gsn_streamline_scalar', 'gsn_streamline_scalar_map', 'gsn_table',
-                'gsn_text', 'gsn_text_ndc', 'gsn_vector', 'gsn_vector_map',
-                'gsn_vector_scalar', 'gsn_vector_scalar_map', 'gsn_xy', 'gsn_y',
-                'hsv2rgb', 'maximize_output', 'namedcolor2rgb', 'namedcolor2rgba',
-                'reset_device_coordinates', 'span_named_colors'), prefix=r'\b'),
-             Name.Builtin),
-
-            # Resources
-            (words((
-                'amDataXF', 'amDataYF', 'amJust', 'amOn', 'amOrthogonalPosF',
-                'amParallelPosF', 'amResizeNotify', 'amSide', 'amTrackData',
-                'amViewId', 'amZone', 'appDefaultParent', 'appFileSuffix',
-                'appResources', 'appSysDir', 'appUsrDir', 'caCopyArrays',
-                'caXArray', 'caXCast', 'caXMaxV', 'caXMinV', 'caXMissingV',
-                'caYArray', 'caYCast', 'caYMaxV', 'caYMinV', 'caYMissingV',
-                'cnCellFillEdgeColor', 'cnCellFillMissingValEdgeColor',
-                'cnConpackParams', 'cnConstFEnableFill', 'cnConstFLabelAngleF',
-                'cnConstFLabelBackgroundColor', 'cnConstFLabelConstantSpacingF',
-                'cnConstFLabelFont', 'cnConstFLabelFontAspectF',
-                'cnConstFLabelFontColor', 'cnConstFLabelFontHeightF',
-                'cnConstFLabelFontQuality', 'cnConstFLabelFontThicknessF',
-                'cnConstFLabelFormat', 'cnConstFLabelFuncCode', 'cnConstFLabelJust',
-                'cnConstFLabelOn', 'cnConstFLabelOrthogonalPosF',
-                'cnConstFLabelParallelPosF', 'cnConstFLabelPerimColor',
-                'cnConstFLabelPerimOn', 'cnConstFLabelPerimSpaceF',
-                'cnConstFLabelPerimThicknessF', 'cnConstFLabelSide',
-                'cnConstFLabelString', 'cnConstFLabelTextDirection',
-                'cnConstFLabelZone', 'cnConstFUseInfoLabelRes',
-                'cnExplicitLabelBarLabelsOn', 'cnExplicitLegendLabelsOn',
-                'cnExplicitLineLabelsOn', 'cnFillBackgroundColor', 'cnFillColor',
-                'cnFillColors', 'cnFillDotSizeF', 'cnFillDrawOrder', 'cnFillMode',
-                'cnFillOn', 'cnFillOpacityF', 'cnFillPalette', 'cnFillPattern',
-                'cnFillPatterns', 'cnFillScaleF', 'cnFillScales', 'cnFixFillBleed',
-                'cnGridBoundFillColor', 'cnGridBoundFillPattern',
-                'cnGridBoundFillScaleF', 'cnGridBoundPerimColor',
-                'cnGridBoundPerimDashPattern', 'cnGridBoundPerimOn',
-                'cnGridBoundPerimThicknessF', 'cnHighLabelAngleF',
-                'cnHighLabelBackgroundColor', 'cnHighLabelConstantSpacingF',
-                'cnHighLabelCount', 'cnHighLabelFont', 'cnHighLabelFontAspectF',
-                'cnHighLabelFontColor', 'cnHighLabelFontHeightF',
-                'cnHighLabelFontQuality', 'cnHighLabelFontThicknessF',
-                'cnHighLabelFormat', 'cnHighLabelFuncCode', 'cnHighLabelPerimColor',
-                'cnHighLabelPerimOn', 'cnHighLabelPerimSpaceF',
-                'cnHighLabelPerimThicknessF', 'cnHighLabelString', 'cnHighLabelsOn',
-                'cnHighLowLabelOverlapMode', 'cnHighUseLineLabelRes',
-                'cnInfoLabelAngleF', 'cnInfoLabelBackgroundColor',
-                'cnInfoLabelConstantSpacingF', 'cnInfoLabelFont',
-                'cnInfoLabelFontAspectF', 'cnInfoLabelFontColor',
-                'cnInfoLabelFontHeightF', 'cnInfoLabelFontQuality',
-                'cnInfoLabelFontThicknessF', 'cnInfoLabelFormat',
-                'cnInfoLabelFuncCode', 'cnInfoLabelJust', 'cnInfoLabelOn',
-                'cnInfoLabelOrthogonalPosF', 'cnInfoLabelParallelPosF',
-                'cnInfoLabelPerimColor', 'cnInfoLabelPerimOn',
-                'cnInfoLabelPerimSpaceF', 'cnInfoLabelPerimThicknessF',
-                'cnInfoLabelSide', 'cnInfoLabelString', 'cnInfoLabelTextDirection',
-                'cnInfoLabelZone', 'cnLabelBarEndLabelsOn', 'cnLabelBarEndStyle',
-                'cnLabelDrawOrder', 'cnLabelMasking', 'cnLabelScaleFactorF',
-                'cnLabelScaleValueF', 'cnLabelScalingMode', 'cnLegendLevelFlags',
-                'cnLevelCount', 'cnLevelFlag', 'cnLevelFlags', 'cnLevelSelectionMode',
-                'cnLevelSpacingF', 'cnLevels', 'cnLineColor', 'cnLineColors',
-                'cnLineDashPattern', 'cnLineDashPatterns', 'cnLineDashSegLenF',
-                'cnLineDrawOrder', 'cnLineLabelAngleF', 'cnLineLabelBackgroundColor',
-                'cnLineLabelConstantSpacingF', 'cnLineLabelCount',
-                'cnLineLabelDensityF', 'cnLineLabelFont', 'cnLineLabelFontAspectF',
-                'cnLineLabelFontColor', 'cnLineLabelFontColors',
-                'cnLineLabelFontHeightF', 'cnLineLabelFontQuality',
-                'cnLineLabelFontThicknessF', 'cnLineLabelFormat',
-                'cnLineLabelFuncCode', 'cnLineLabelInterval', 'cnLineLabelPerimColor',
-                'cnLineLabelPerimOn', 'cnLineLabelPerimSpaceF',
-                'cnLineLabelPerimThicknessF', 'cnLineLabelPlacementMode',
-                'cnLineLabelStrings', 'cnLineLabelsOn', 'cnLinePalette',
-                'cnLineThicknessF', 'cnLineThicknesses', 'cnLinesOn',
-                'cnLowLabelAngleF', 'cnLowLabelBackgroundColor',
-                'cnLowLabelConstantSpacingF', 'cnLowLabelCount', 'cnLowLabelFont',
-                'cnLowLabelFontAspectF', 'cnLowLabelFontColor',
-                'cnLowLabelFontHeightF', 'cnLowLabelFontQuality',
-                'cnLowLabelFontThicknessF', 'cnLowLabelFormat', 'cnLowLabelFuncCode',
-                'cnLowLabelPerimColor', 'cnLowLabelPerimOn', 'cnLowLabelPerimSpaceF',
-                'cnLowLabelPerimThicknessF', 'cnLowLabelString', 'cnLowLabelsOn',
-                'cnLowUseHighLabelRes', 'cnMaxDataValueFormat', 'cnMaxLevelCount',
-                'cnMaxLevelValF', 'cnMaxPointDistanceF', 'cnMinLevelValF',
-                'cnMissingValFillColor', 'cnMissingValFillPattern',
-                'cnMissingValFillScaleF', 'cnMissingValPerimColor',
-                'cnMissingValPerimDashPattern', 'cnMissingValPerimGridBoundOn',
-                'cnMissingValPerimOn', 'cnMissingValPerimThicknessF',
-                'cnMonoFillColor', 'cnMonoFillPattern', 'cnMonoFillScale',
-                'cnMonoLevelFlag', 'cnMonoLineColor', 'cnMonoLineDashPattern',
-                'cnMonoLineLabelFontColor', 'cnMonoLineThickness', 'cnNoDataLabelOn',
-                'cnNoDataLabelString', 'cnOutOfRangeFillColor',
-                'cnOutOfRangeFillPattern', 'cnOutOfRangeFillScaleF',
-                'cnOutOfRangePerimColor', 'cnOutOfRangePerimDashPattern',
-                'cnOutOfRangePerimOn', 'cnOutOfRangePerimThicknessF',
-                'cnRasterCellSizeF', 'cnRasterMinCellSizeF', 'cnRasterModeOn',
-                'cnRasterSampleFactorF', 'cnRasterSmoothingOn', 'cnScalarFieldData',
-                'cnSmoothingDistanceF', 'cnSmoothingOn', 'cnSmoothingTensionF',
-                'cnSpanFillPalette', 'cnSpanLinePalette', 'ctCopyTables',
-                'ctXElementSize', 'ctXMaxV', 'ctXMinV', 'ctXMissingV', 'ctXTable',
-                'ctXTableLengths', 'ctXTableType', 'ctYElementSize', 'ctYMaxV',
-                'ctYMinV', 'ctYMissingV', 'ctYTable', 'ctYTableLengths',
-                'ctYTableType', 'dcDelayCompute', 'errBuffer',
-                'errFileName', 'errFilePtr', 'errLevel', 'errPrint', 'errUnitNumber',
-                'gsClipOn', 'gsColors', 'gsEdgeColor', 'gsEdgeDashPattern',
-                'gsEdgeDashSegLenF', 'gsEdgeThicknessF', 'gsEdgesOn',
-                'gsFillBackgroundColor', 'gsFillColor', 'gsFillDotSizeF',
-                'gsFillIndex', 'gsFillLineThicknessF', 'gsFillOpacityF',
-                'gsFillScaleF', 'gsFont', 'gsFontAspectF', 'gsFontColor',
-                'gsFontHeightF', 'gsFontOpacityF', 'gsFontQuality',
-                'gsFontThicknessF', 'gsLineColor', 'gsLineDashPattern',
-                'gsLineDashSegLenF', 'gsLineLabelConstantSpacingF', 'gsLineLabelFont',
-                'gsLineLabelFontAspectF', 'gsLineLabelFontColor',
-                'gsLineLabelFontHeightF', 'gsLineLabelFontQuality',
-                'gsLineLabelFontThicknessF', 'gsLineLabelFuncCode',
-                'gsLineLabelString', 'gsLineOpacityF', 'gsLineThicknessF',
-                'gsMarkerColor', 'gsMarkerIndex', 'gsMarkerOpacityF', 'gsMarkerSizeF',
-                'gsMarkerThicknessF', 'gsSegments', 'gsTextAngleF',
-                'gsTextConstantSpacingF', 'gsTextDirection', 'gsTextFuncCode',
-                'gsTextJustification', 'gsnAboveYRefLineBarColors',
-                'gsnAboveYRefLineBarFillScales', 'gsnAboveYRefLineBarPatterns',
-                'gsnAboveYRefLineColor', 'gsnAddCyclic', 'gsnAttachBorderOn',
-                'gsnAttachPlotsXAxis', 'gsnBelowYRefLineBarColors',
-                'gsnBelowYRefLineBarFillScales', 'gsnBelowYRefLineBarPatterns',
-                'gsnBelowYRefLineColor', 'gsnBoxMargin', 'gsnCenterString',
-                'gsnCenterStringFontColor', 'gsnCenterStringFontHeightF',
-                'gsnCenterStringFuncCode', 'gsnCenterStringOrthogonalPosF',
-                'gsnCenterStringParallelPosF', 'gsnContourLineThicknessesScale',
-                'gsnContourNegLineDashPattern', 'gsnContourPosLineDashPattern',
-                'gsnContourZeroLineThicknessF', 'gsnDebugWriteFileName', 'gsnDraw',
-                'gsnFrame', 'gsnHistogramBarWidthPercent', 'gsnHistogramBinIntervals',
-                'gsnHistogramBinMissing', 'gsnHistogramBinWidth',
-                'gsnHistogramClassIntervals', 'gsnHistogramCompare',
-                'gsnHistogramComputePercentages',
-                'gsnHistogramComputePercentagesNoMissing',
-                'gsnHistogramDiscreteBinValues', 'gsnHistogramDiscreteClassValues',
-                'gsnHistogramHorizontal', 'gsnHistogramMinMaxBinsOn',
-                'gsnHistogramNumberOfBins', 'gsnHistogramPercentSign',
-                'gsnHistogramSelectNiceIntervals', 'gsnLeftString',
-                'gsnLeftStringFontColor', 'gsnLeftStringFontHeightF',
-                'gsnLeftStringFuncCode', 'gsnLeftStringOrthogonalPosF',
-                'gsnLeftStringParallelPosF', 'gsnMajorLatSpacing',
-                'gsnMajorLonSpacing', 'gsnMaskLambertConformal',
-                'gsnMaskLambertConformalOutlineOn', 'gsnMaximize',
-                'gsnMinorLatSpacing', 'gsnMinorLonSpacing', 'gsnPanelBottom',
-                'gsnPanelCenter', 'gsnPanelDebug', 'gsnPanelFigureStrings',
-                'gsnPanelFigureStringsBackgroundFillColor',
-                'gsnPanelFigureStringsFontHeightF', 'gsnPanelFigureStringsJust',
-                'gsnPanelFigureStringsPerimOn', 'gsnPanelLabelBar', 'gsnPanelLeft',
-                'gsnPanelMainFont', 'gsnPanelMainFontColor',
-                'gsnPanelMainFontHeightF', 'gsnPanelMainString', 'gsnPanelRight',
-                'gsnPanelRowSpec', 'gsnPanelScalePlotIndex', 'gsnPanelTop',
-                'gsnPanelXF', 'gsnPanelXWhiteSpacePercent', 'gsnPanelYF',
-                'gsnPanelYWhiteSpacePercent', 'gsnPaperHeight', 'gsnPaperMargin',
-                'gsnPaperOrientation', 'gsnPaperWidth', 'gsnPolar',
-                'gsnPolarLabelDistance', 'gsnPolarLabelFont',
-                'gsnPolarLabelFontHeightF', 'gsnPolarLabelSpacing', 'gsnPolarTime',
-                'gsnPolarUT', 'gsnRightString', 'gsnRightStringFontColor',
-                'gsnRightStringFontHeightF', 'gsnRightStringFuncCode',
-                'gsnRightStringOrthogonalPosF', 'gsnRightStringParallelPosF',
-                'gsnScalarContour', 'gsnScale', 'gsnShape', 'gsnSpreadColorEnd',
-                'gsnSpreadColorStart', 'gsnSpreadColors', 'gsnStringFont',
-                'gsnStringFontColor', 'gsnStringFontHeightF', 'gsnStringFuncCode',
-                'gsnTickMarksOn', 'gsnXAxisIrregular2Linear', 'gsnXAxisIrregular2Log',
-                'gsnXRefLine', 'gsnXRefLineColor', 'gsnXRefLineDashPattern',
-                'gsnXRefLineThicknessF', 'gsnXYAboveFillColors', 'gsnXYBarChart',
-                'gsnXYBarChartBarWidth', 'gsnXYBarChartColors',
-                'gsnXYBarChartColors2', 'gsnXYBarChartFillDotSizeF',
-                'gsnXYBarChartFillLineThicknessF', 'gsnXYBarChartFillOpacityF',
-                'gsnXYBarChartFillScaleF', 'gsnXYBarChartOutlineOnly',
-                'gsnXYBarChartOutlineThicknessF', 'gsnXYBarChartPatterns',
-                'gsnXYBarChartPatterns2', 'gsnXYBelowFillColors', 'gsnXYFillColors',
-                'gsnXYFillOpacities', 'gsnXYLeftFillColors', 'gsnXYRightFillColors',
-                'gsnYAxisIrregular2Linear', 'gsnYAxisIrregular2Log', 'gsnYRefLine',
-                'gsnYRefLineColor', 'gsnYRefLineColors', 'gsnYRefLineDashPattern',
-                'gsnYRefLineDashPatterns', 'gsnYRefLineThicknessF',
-                'gsnYRefLineThicknesses', 'gsnZonalMean', 'gsnZonalMeanXMaxF',
-                'gsnZonalMeanXMinF', 'gsnZonalMeanYRefLine', 'lbAutoManage',
-                'lbBottomMarginF', 'lbBoxCount', 'lbBoxEndCapStyle', 'lbBoxFractions',
-                'lbBoxLineColor', 'lbBoxLineDashPattern', 'lbBoxLineDashSegLenF',
-                'lbBoxLineThicknessF', 'lbBoxLinesOn', 'lbBoxMajorExtentF',
-                'lbBoxMinorExtentF', 'lbBoxSeparatorLinesOn', 'lbBoxSizing',
-                'lbFillBackground', 'lbFillColor', 'lbFillColors', 'lbFillDotSizeF',
-                'lbFillLineThicknessF', 'lbFillPattern', 'lbFillPatterns',
-                'lbFillScaleF', 'lbFillScales', 'lbJustification', 'lbLabelAlignment',
-                'lbLabelAngleF', 'lbLabelAutoStride', 'lbLabelBarOn',
-                'lbLabelConstantSpacingF', 'lbLabelDirection', 'lbLabelFont',
-                'lbLabelFontAspectF', 'lbLabelFontColor', 'lbLabelFontHeightF',
-                'lbLabelFontQuality', 'lbLabelFontThicknessF', 'lbLabelFuncCode',
-                'lbLabelJust', 'lbLabelOffsetF', 'lbLabelPosition', 'lbLabelStride',
-                'lbLabelStrings', 'lbLabelsOn', 'lbLeftMarginF', 'lbMaxLabelLenF',
-                'lbMinLabelSpacingF', 'lbMonoFillColor', 'lbMonoFillPattern',
-                'lbMonoFillScale', 'lbOrientation', 'lbPerimColor',
-                'lbPerimDashPattern', 'lbPerimDashSegLenF', 'lbPerimFill',
-                'lbPerimFillColor', 'lbPerimOn', 'lbPerimThicknessF',
-                'lbRasterFillOn', 'lbRightMarginF', 'lbTitleAngleF',
-                'lbTitleConstantSpacingF', 'lbTitleDirection', 'lbTitleExtentF',
-                'lbTitleFont', 'lbTitleFontAspectF', 'lbTitleFontColor',
-                'lbTitleFontHeightF', 'lbTitleFontQuality', 'lbTitleFontThicknessF',
-                'lbTitleFuncCode', 'lbTitleJust', 'lbTitleOffsetF', 'lbTitleOn',
-                'lbTitlePosition', 'lbTitleString', 'lbTopMarginF', 'lgAutoManage',
-                'lgBottomMarginF', 'lgBoxBackground', 'lgBoxLineColor',
-                'lgBoxLineDashPattern', 'lgBoxLineDashSegLenF', 'lgBoxLineThicknessF',
-                'lgBoxLinesOn', 'lgBoxMajorExtentF', 'lgBoxMinorExtentF',
-                'lgDashIndex', 'lgDashIndexes', 'lgItemCount', 'lgItemOrder',
-                'lgItemPlacement', 'lgItemPositions', 'lgItemType', 'lgItemTypes',
-                'lgJustification', 'lgLabelAlignment', 'lgLabelAngleF',
-                'lgLabelAutoStride', 'lgLabelConstantSpacingF', 'lgLabelDirection',
-                'lgLabelFont', 'lgLabelFontAspectF', 'lgLabelFontColor',
-                'lgLabelFontHeightF', 'lgLabelFontQuality', 'lgLabelFontThicknessF',
-                'lgLabelFuncCode', 'lgLabelJust', 'lgLabelOffsetF', 'lgLabelPosition',
-                'lgLabelStride', 'lgLabelStrings', 'lgLabelsOn', 'lgLeftMarginF',
-                'lgLegendOn', 'lgLineColor', 'lgLineColors', 'lgLineDashSegLenF',
-                'lgLineDashSegLens', 'lgLineLabelConstantSpacingF', 'lgLineLabelFont',
-                'lgLineLabelFontAspectF', 'lgLineLabelFontColor',
-                'lgLineLabelFontColors', 'lgLineLabelFontHeightF',
-                'lgLineLabelFontHeights', 'lgLineLabelFontQuality',
-                'lgLineLabelFontThicknessF', 'lgLineLabelFuncCode',
-                'lgLineLabelStrings', 'lgLineLabelsOn', 'lgLineThicknessF',
-                'lgLineThicknesses', 'lgMarkerColor', 'lgMarkerColors',
-                'lgMarkerIndex', 'lgMarkerIndexes', 'lgMarkerSizeF', 'lgMarkerSizes',
-                'lgMarkerThicknessF', 'lgMarkerThicknesses', 'lgMonoDashIndex',
-                'lgMonoItemType', 'lgMonoLineColor', 'lgMonoLineDashSegLen',
-                'lgMonoLineLabelFontColor', 'lgMonoLineLabelFontHeight',
-                'lgMonoLineThickness', 'lgMonoMarkerColor', 'lgMonoMarkerIndex',
-                'lgMonoMarkerSize', 'lgMonoMarkerThickness', 'lgOrientation',
-                'lgPerimColor', 'lgPerimDashPattern', 'lgPerimDashSegLenF',
-                'lgPerimFill', 'lgPerimFillColor', 'lgPerimOn', 'lgPerimThicknessF',
-                'lgRightMarginF', 'lgTitleAngleF', 'lgTitleConstantSpacingF',
-                'lgTitleDirection', 'lgTitleExtentF', 'lgTitleFont',
-                'lgTitleFontAspectF', 'lgTitleFontColor', 'lgTitleFontHeightF',
-                'lgTitleFontQuality', 'lgTitleFontThicknessF', 'lgTitleFuncCode',
-                'lgTitleJust', 'lgTitleOffsetF', 'lgTitleOn', 'lgTitlePosition',
-                'lgTitleString', 'lgTopMarginF', 'mpAreaGroupCount',
-                'mpAreaMaskingOn', 'mpAreaNames', 'mpAreaTypes', 'mpBottomAngleF',
-                'mpBottomMapPosF', 'mpBottomNDCF', 'mpBottomNPCF',
-                'mpBottomPointLatF', 'mpBottomPointLonF', 'mpBottomWindowF',
-                'mpCenterLatF', 'mpCenterLonF', 'mpCenterRotF', 'mpCountyLineColor',
-                'mpCountyLineDashPattern', 'mpCountyLineDashSegLenF',
-                'mpCountyLineThicknessF', 'mpDataBaseVersion', 'mpDataResolution',
-                'mpDataSetName', 'mpDefaultFillColor', 'mpDefaultFillPattern',
-                'mpDefaultFillScaleF', 'mpDynamicAreaGroups', 'mpEllipticalBoundary',
-                'mpFillAreaSpecifiers', 'mpFillBoundarySets', 'mpFillColor',
-                'mpFillColors', 'mpFillColors-default', 'mpFillDotSizeF',
-                'mpFillDrawOrder', 'mpFillOn', 'mpFillPatternBackground',
-                'mpFillPattern', 'mpFillPatterns', 'mpFillPatterns-default',
-                'mpFillScaleF', 'mpFillScales', 'mpFillScales-default',
-                'mpFixedAreaGroups', 'mpGeophysicalLineColor',
-                'mpGeophysicalLineDashPattern', 'mpGeophysicalLineDashSegLenF',
-                'mpGeophysicalLineThicknessF', 'mpGreatCircleLinesOn',
-                'mpGridAndLimbDrawOrder', 'mpGridAndLimbOn', 'mpGridLatSpacingF',
-                'mpGridLineColor', 'mpGridLineDashPattern', 'mpGridLineDashSegLenF',
-                'mpGridLineThicknessF', 'mpGridLonSpacingF', 'mpGridMaskMode',
-                'mpGridMaxLatF', 'mpGridPolarLonSpacingF', 'mpGridSpacingF',
-                'mpInlandWaterFillColor', 'mpInlandWaterFillPattern',
-                'mpInlandWaterFillScaleF', 'mpLabelDrawOrder', 'mpLabelFontColor',
-                'mpLabelFontHeightF', 'mpLabelsOn', 'mpLambertMeridianF',
-                'mpLambertParallel1F', 'mpLambertParallel2F', 'mpLandFillColor',
-                'mpLandFillPattern', 'mpLandFillScaleF', 'mpLeftAngleF',
-                'mpLeftCornerLatF', 'mpLeftCornerLonF', 'mpLeftMapPosF',
-                'mpLeftNDCF', 'mpLeftNPCF', 'mpLeftPointLatF',
-                'mpLeftPointLonF', 'mpLeftWindowF', 'mpLimbLineColor',
-                'mpLimbLineDashPattern', 'mpLimbLineDashSegLenF',
-                'mpLimbLineThicknessF', 'mpLimitMode', 'mpMaskAreaSpecifiers',
-                'mpMaskOutlineSpecifiers', 'mpMaxLatF', 'mpMaxLonF',
-                'mpMinLatF', 'mpMinLonF', 'mpMonoFillColor', 'mpMonoFillPattern',
-                'mpMonoFillScale', 'mpNationalLineColor', 'mpNationalLineDashPattern',
-                'mpNationalLineThicknessF', 'mpOceanFillColor', 'mpOceanFillPattern',
-                'mpOceanFillScaleF', 'mpOutlineBoundarySets', 'mpOutlineDrawOrder',
-                'mpOutlineMaskingOn', 'mpOutlineOn', 'mpOutlineSpecifiers',
-                'mpPerimDrawOrder', 'mpPerimLineColor', 'mpPerimLineDashPattern',
-                'mpPerimLineDashSegLenF', 'mpPerimLineThicknessF', 'mpPerimOn',
-                'mpPolyMode', 'mpProjection', 'mpProvincialLineColor',
-                'mpProvincialLineDashPattern', 'mpProvincialLineDashSegLenF',
-                'mpProvincialLineThicknessF', 'mpRelativeCenterLat',
-                'mpRelativeCenterLon', 'mpRightAngleF', 'mpRightCornerLatF',
-                'mpRightCornerLonF', 'mpRightMapPosF', 'mpRightNDCF',
-                'mpRightNPCF', 'mpRightPointLatF', 'mpRightPointLonF',
-                'mpRightWindowF', 'mpSatelliteAngle1F', 'mpSatelliteAngle2F',
-                'mpSatelliteDistF', 'mpShapeMode', 'mpSpecifiedFillColors',
-                'mpSpecifiedFillDirectIndexing', 'mpSpecifiedFillPatterns',
-                'mpSpecifiedFillPriority', 'mpSpecifiedFillScales',
-                'mpTopAngleF', 'mpTopMapPosF', 'mpTopNDCF', 'mpTopNPCF',
-                'mpTopPointLatF', 'mpTopPointLonF', 'mpTopWindowF',
-                'mpUSStateLineColor', 'mpUSStateLineDashPattern',
-                'mpUSStateLineDashSegLenF', 'mpUSStateLineThicknessF',
-                'pmAnnoManagers', 'pmAnnoViews', 'pmLabelBarDisplayMode',
-                'pmLabelBarHeightF', 'pmLabelBarKeepAspect', 'pmLabelBarOrthogonalPosF',
-                'pmLabelBarParallelPosF', 'pmLabelBarSide', 'pmLabelBarWidthF',
-                'pmLabelBarZone', 'pmLegendDisplayMode', 'pmLegendHeightF',
-                'pmLegendKeepAspect', 'pmLegendOrthogonalPosF',
-                'pmLegendParallelPosF', 'pmLegendSide', 'pmLegendWidthF',
-                'pmLegendZone', 'pmOverlaySequenceIds', 'pmTickMarkDisplayMode',
-                'pmTickMarkZone', 'pmTitleDisplayMode', 'pmTitleZone',
-                'prGraphicStyle', 'prPolyType', 'prXArray', 'prYArray',
-                'sfCopyData', 'sfDataArray', 'sfDataMaxV', 'sfDataMinV',
-                'sfElementNodes', 'sfExchangeDimensions', 'sfFirstNodeIndex',
-                'sfMissingValueV', 'sfXArray', 'sfXCActualEndF', 'sfXCActualStartF',
-                'sfXCEndIndex', 'sfXCEndSubsetV', 'sfXCEndV', 'sfXCStartIndex',
-                'sfXCStartSubsetV', 'sfXCStartV', 'sfXCStride', 'sfXCellBounds',
-                'sfYArray', 'sfYCActualEndF', 'sfYCActualStartF', 'sfYCEndIndex',
-                'sfYCEndSubsetV', 'sfYCEndV', 'sfYCStartIndex', 'sfYCStartSubsetV',
-                'sfYCStartV', 'sfYCStride', 'sfYCellBounds', 'stArrowLengthF',
-                'stArrowStride', 'stCrossoverCheckCount',
-                'stExplicitLabelBarLabelsOn', 'stLabelBarEndLabelsOn',
-                'stLabelFormat', 'stLengthCheckCount', 'stLevelColors',
-                'stLevelCount', 'stLevelPalette', 'stLevelSelectionMode',
-                'stLevelSpacingF', 'stLevels', 'stLineColor', 'stLineOpacityF',
-                'stLineStartStride', 'stLineThicknessF', 'stMapDirection',
-                'stMaxLevelCount', 'stMaxLevelValF', 'stMinArrowSpacingF',
-                'stMinDistanceF', 'stMinLevelValF', 'stMinLineSpacingF',
-                'stMinStepFactorF', 'stMonoLineColor', 'stNoDataLabelOn',
-                'stNoDataLabelString', 'stScalarFieldData', 'stScalarMissingValColor',
-                'stSpanLevelPalette', 'stStepSizeF', 'stStreamlineDrawOrder',
-                'stUseScalarArray', 'stVectorFieldData', 'stZeroFLabelAngleF',
-                'stZeroFLabelBackgroundColor', 'stZeroFLabelConstantSpacingF',
-                'stZeroFLabelFont', 'stZeroFLabelFontAspectF',
-                'stZeroFLabelFontColor', 'stZeroFLabelFontHeightF',
-                'stZeroFLabelFontQuality', 'stZeroFLabelFontThicknessF',
-                'stZeroFLabelFuncCode', 'stZeroFLabelJust', 'stZeroFLabelOn',
-                'stZeroFLabelOrthogonalPosF', 'stZeroFLabelParallelPosF',
-                'stZeroFLabelPerimColor', 'stZeroFLabelPerimOn',
-                'stZeroFLabelPerimSpaceF', 'stZeroFLabelPerimThicknessF',
-                'stZeroFLabelSide', 'stZeroFLabelString', 'stZeroFLabelTextDirection',
-                'stZeroFLabelZone', 'tfDoNDCOverlay', 'tfPlotManagerOn',
-                'tfPolyDrawList', 'tfPolyDrawOrder', 'tiDeltaF', 'tiMainAngleF',
-                'tiMainConstantSpacingF', 'tiMainDirection', 'tiMainFont',
-                'tiMainFontAspectF', 'tiMainFontColor', 'tiMainFontHeightF',
-                'tiMainFontQuality', 'tiMainFontThicknessF', 'tiMainFuncCode',
-                'tiMainJust', 'tiMainOffsetXF', 'tiMainOffsetYF', 'tiMainOn',
-                'tiMainPosition', 'tiMainSide', 'tiMainString', 'tiUseMainAttributes',
-                'tiXAxisAngleF', 'tiXAxisConstantSpacingF', 'tiXAxisDirection',
-                'tiXAxisFont', 'tiXAxisFontAspectF', 'tiXAxisFontColor',
-                'tiXAxisFontHeightF', 'tiXAxisFontQuality', 'tiXAxisFontThicknessF',
-                'tiXAxisFuncCode', 'tiXAxisJust', 'tiXAxisOffsetXF',
-                'tiXAxisOffsetYF', 'tiXAxisOn', 'tiXAxisPosition', 'tiXAxisSide',
-                'tiXAxisString', 'tiYAxisAngleF', 'tiYAxisConstantSpacingF',
-                'tiYAxisDirection', 'tiYAxisFont', 'tiYAxisFontAspectF',
-                'tiYAxisFontColor', 'tiYAxisFontHeightF', 'tiYAxisFontQuality',
-                'tiYAxisFontThicknessF', 'tiYAxisFuncCode', 'tiYAxisJust',
-                'tiYAxisOffsetXF', 'tiYAxisOffsetYF', 'tiYAxisOn', 'tiYAxisPosition',
-                'tiYAxisSide', 'tiYAxisString', 'tmBorderLineColor',
-                'tmBorderThicknessF', 'tmEqualizeXYSizes', 'tmLabelAutoStride',
-                'tmSciNoteCutoff', 'tmXBAutoPrecision', 'tmXBBorderOn',
-                'tmXBDataLeftF', 'tmXBDataRightF', 'tmXBFormat', 'tmXBIrrTensionF',
-                'tmXBIrregularPoints', 'tmXBLabelAngleF', 'tmXBLabelConstantSpacingF',
-                'tmXBLabelDeltaF', 'tmXBLabelDirection', 'tmXBLabelFont',
-                'tmXBLabelFontAspectF', 'tmXBLabelFontColor', 'tmXBLabelFontHeightF',
-                'tmXBLabelFontQuality', 'tmXBLabelFontThicknessF',
-                'tmXBLabelFuncCode', 'tmXBLabelJust', 'tmXBLabelStride', 'tmXBLabels',
-                'tmXBLabelsOn', 'tmXBMajorLengthF', 'tmXBMajorLineColor',
-                'tmXBMajorOutwardLengthF', 'tmXBMajorThicknessF', 'tmXBMaxLabelLenF',
-                'tmXBMaxTicks', 'tmXBMinLabelSpacingF', 'tmXBMinorLengthF',
-                'tmXBMinorLineColor', 'tmXBMinorOn', 'tmXBMinorOutwardLengthF',
-                'tmXBMinorPerMajor', 'tmXBMinorThicknessF', 'tmXBMinorValues',
-                'tmXBMode', 'tmXBOn', 'tmXBPrecision', 'tmXBStyle', 'tmXBTickEndF',
-                'tmXBTickSpacingF', 'tmXBTickStartF', 'tmXBValues', 'tmXMajorGrid',
-                'tmXMajorGridLineColor', 'tmXMajorGridLineDashPattern',
-                'tmXMajorGridThicknessF', 'tmXMinorGrid', 'tmXMinorGridLineColor',
-                'tmXMinorGridLineDashPattern', 'tmXMinorGridThicknessF',
-                'tmXTAutoPrecision', 'tmXTBorderOn', 'tmXTDataLeftF',
-                'tmXTDataRightF', 'tmXTFormat', 'tmXTIrrTensionF',
-                'tmXTIrregularPoints', 'tmXTLabelAngleF', 'tmXTLabelConstantSpacingF',
-                'tmXTLabelDeltaF', 'tmXTLabelDirection', 'tmXTLabelFont',
-                'tmXTLabelFontAspectF', 'tmXTLabelFontColor', 'tmXTLabelFontHeightF',
-                'tmXTLabelFontQuality', 'tmXTLabelFontThicknessF',
-                'tmXTLabelFuncCode', 'tmXTLabelJust', 'tmXTLabelStride', 'tmXTLabels',
-                'tmXTLabelsOn', 'tmXTMajorLengthF', 'tmXTMajorLineColor',
-                'tmXTMajorOutwardLengthF', 'tmXTMajorThicknessF', 'tmXTMaxLabelLenF',
-                'tmXTMaxTicks', 'tmXTMinLabelSpacingF', 'tmXTMinorLengthF',
-                'tmXTMinorLineColor', 'tmXTMinorOn', 'tmXTMinorOutwardLengthF',
-                'tmXTMinorPerMajor', 'tmXTMinorThicknessF', 'tmXTMinorValues',
-                'tmXTMode', 'tmXTOn', 'tmXTPrecision', 'tmXTStyle', 'tmXTTickEndF',
-                'tmXTTickSpacingF', 'tmXTTickStartF', 'tmXTValues', 'tmXUseBottom',
-                'tmYLAutoPrecision', 'tmYLBorderOn', 'tmYLDataBottomF',
-                'tmYLDataTopF', 'tmYLFormat', 'tmYLIrrTensionF',
-                'tmYLIrregularPoints', 'tmYLLabelAngleF', 'tmYLLabelConstantSpacingF',
-                'tmYLLabelDeltaF', 'tmYLLabelDirection', 'tmYLLabelFont',
-                'tmYLLabelFontAspectF', 'tmYLLabelFontColor', 'tmYLLabelFontHeightF',
-                'tmYLLabelFontQuality', 'tmYLLabelFontThicknessF',
-                'tmYLLabelFuncCode', 'tmYLLabelJust', 'tmYLLabelStride', 'tmYLLabels',
-                'tmYLLabelsOn', 'tmYLMajorLengthF', 'tmYLMajorLineColor',
-                'tmYLMajorOutwardLengthF', 'tmYLMajorThicknessF', 'tmYLMaxLabelLenF',
-                'tmYLMaxTicks', 'tmYLMinLabelSpacingF', 'tmYLMinorLengthF',
-                'tmYLMinorLineColor', 'tmYLMinorOn', 'tmYLMinorOutwardLengthF',
-                'tmYLMinorPerMajor', 'tmYLMinorThicknessF', 'tmYLMinorValues',
-                'tmYLMode', 'tmYLOn', 'tmYLPrecision', 'tmYLStyle', 'tmYLTickEndF',
-                'tmYLTickSpacingF', 'tmYLTickStartF', 'tmYLValues', 'tmYMajorGrid',
-                'tmYMajorGridLineColor', 'tmYMajorGridLineDashPattern',
-                'tmYMajorGridThicknessF', 'tmYMinorGrid', 'tmYMinorGridLineColor',
-                'tmYMinorGridLineDashPattern', 'tmYMinorGridThicknessF',
-                'tmYRAutoPrecision', 'tmYRBorderOn', 'tmYRDataBottomF',
-                'tmYRDataTopF', 'tmYRFormat', 'tmYRIrrTensionF',
-                'tmYRIrregularPoints', 'tmYRLabelAngleF', 'tmYRLabelConstantSpacingF',
-                'tmYRLabelDeltaF', 'tmYRLabelDirection', 'tmYRLabelFont',
-                'tmYRLabelFontAspectF', 'tmYRLabelFontColor', 'tmYRLabelFontHeightF',
-                'tmYRLabelFontQuality', 'tmYRLabelFontThicknessF',
-                'tmYRLabelFuncCode', 'tmYRLabelJust', 'tmYRLabelStride', 'tmYRLabels',
-                'tmYRLabelsOn', 'tmYRMajorLengthF', 'tmYRMajorLineColor',
-                'tmYRMajorOutwardLengthF', 'tmYRMajorThicknessF', 'tmYRMaxLabelLenF',
-                'tmYRMaxTicks', 'tmYRMinLabelSpacingF', 'tmYRMinorLengthF',
-                'tmYRMinorLineColor', 'tmYRMinorOn', 'tmYRMinorOutwardLengthF',
-                'tmYRMinorPerMajor', 'tmYRMinorThicknessF', 'tmYRMinorValues',
-                'tmYRMode', 'tmYROn', 'tmYRPrecision', 'tmYRStyle', 'tmYRTickEndF',
-                'tmYRTickSpacingF', 'tmYRTickStartF', 'tmYRValues', 'tmYUseLeft',
-                'trGridType', 'trLineInterpolationOn',
-                'trXAxisType', 'trXCoordPoints', 'trXInterPoints', 'trXLog',
-                'trXMaxF', 'trXMinF', 'trXReverse', 'trXSamples', 'trXTensionF',
-                'trYAxisType', 'trYCoordPoints', 'trYInterPoints', 'trYLog',
-                'trYMaxF', 'trYMinF', 'trYReverse', 'trYSamples', 'trYTensionF',
-                'txAngleF', 'txBackgroundFillColor', 'txConstantSpacingF', 'txDirection',
-                'txFont', 'HLU-Fonts', 'txFontAspectF', 'txFontColor',
-                'txFontHeightF', 'txFontOpacityF', 'txFontQuality',
-                'txFontThicknessF', 'txFuncCode', 'txJust', 'txPerimColor',
-                'txPerimDashLengthF', 'txPerimDashPattern', 'txPerimOn',
-                'txPerimSpaceF', 'txPerimThicknessF', 'txPosXF', 'txPosYF',
-                'txString', 'vcExplicitLabelBarLabelsOn', 'vcFillArrowEdgeColor',
-                'vcFillArrowEdgeThicknessF', 'vcFillArrowFillColor',
-                'vcFillArrowHeadInteriorXF', 'vcFillArrowHeadMinFracXF',
-                'vcFillArrowHeadMinFracYF', 'vcFillArrowHeadXF', 'vcFillArrowHeadYF',
-                'vcFillArrowMinFracWidthF', 'vcFillArrowWidthF', 'vcFillArrowsOn',
-                'vcFillOverEdge', 'vcGlyphOpacityF', 'vcGlyphStyle',
-                'vcLabelBarEndLabelsOn', 'vcLabelFontColor', 'vcLabelFontHeightF',
-                'vcLabelsOn', 'vcLabelsUseVectorColor', 'vcLevelColors',
-                'vcLevelCount', 'vcLevelPalette', 'vcLevelSelectionMode',
-                'vcLevelSpacingF', 'vcLevels', 'vcLineArrowColor',
-                'vcLineArrowHeadMaxSizeF', 'vcLineArrowHeadMinSizeF',
-                'vcLineArrowThicknessF', 'vcMagnitudeFormat',
-                'vcMagnitudeScaleFactorF', 'vcMagnitudeScaleValueF',
-                'vcMagnitudeScalingMode', 'vcMapDirection', 'vcMaxLevelCount',
-                'vcMaxLevelValF', 'vcMaxMagnitudeF', 'vcMinAnnoAngleF',
-                'vcMinAnnoArrowAngleF', 'vcMinAnnoArrowEdgeColor',
-                'vcMinAnnoArrowFillColor', 'vcMinAnnoArrowLineColor',
-                'vcMinAnnoArrowMinOffsetF', 'vcMinAnnoArrowSpaceF',
-                'vcMinAnnoArrowUseVecColor', 'vcMinAnnoBackgroundColor',
-                'vcMinAnnoConstantSpacingF', 'vcMinAnnoExplicitMagnitudeF',
-                'vcMinAnnoFont', 'vcMinAnnoFontAspectF', 'vcMinAnnoFontColor',
-                'vcMinAnnoFontHeightF', 'vcMinAnnoFontQuality',
-                'vcMinAnnoFontThicknessF', 'vcMinAnnoFuncCode', 'vcMinAnnoJust',
-                'vcMinAnnoOn', 'vcMinAnnoOrientation', 'vcMinAnnoOrthogonalPosF',
-                'vcMinAnnoParallelPosF', 'vcMinAnnoPerimColor', 'vcMinAnnoPerimOn',
-                'vcMinAnnoPerimSpaceF', 'vcMinAnnoPerimThicknessF', 'vcMinAnnoSide',
-                'vcMinAnnoString1', 'vcMinAnnoString1On', 'vcMinAnnoString2',
-                'vcMinAnnoString2On', 'vcMinAnnoTextDirection', 'vcMinAnnoZone',
-                'vcMinDistanceF', 'vcMinFracLengthF', 'vcMinLevelValF',
-                'vcMinMagnitudeF', 'vcMonoFillArrowEdgeColor',
-                'vcMonoFillArrowFillColor', 'vcMonoLineArrowColor',
-                'vcMonoWindBarbColor', 'vcNoDataLabelOn', 'vcNoDataLabelString',
-                'vcPositionMode', 'vcRefAnnoAngleF', 'vcRefAnnoArrowAngleF',
-                'vcRefAnnoArrowEdgeColor', 'vcRefAnnoArrowFillColor',
-                'vcRefAnnoArrowLineColor', 'vcRefAnnoArrowMinOffsetF',
-                'vcRefAnnoArrowSpaceF', 'vcRefAnnoArrowUseVecColor',
-                'vcRefAnnoBackgroundColor', 'vcRefAnnoConstantSpacingF',
-                'vcRefAnnoExplicitMagnitudeF', 'vcRefAnnoFont',
-                'vcRefAnnoFontAspectF', 'vcRefAnnoFontColor', 'vcRefAnnoFontHeightF',
-                'vcRefAnnoFontQuality', 'vcRefAnnoFontThicknessF',
-                'vcRefAnnoFuncCode', 'vcRefAnnoJust', 'vcRefAnnoOn',
-                'vcRefAnnoOrientation', 'vcRefAnnoOrthogonalPosF',
-                'vcRefAnnoParallelPosF', 'vcRefAnnoPerimColor', 'vcRefAnnoPerimOn',
-                'vcRefAnnoPerimSpaceF', 'vcRefAnnoPerimThicknessF', 'vcRefAnnoSide',
-                'vcRefAnnoString1', 'vcRefAnnoString1On', 'vcRefAnnoString2',
-                'vcRefAnnoString2On', 'vcRefAnnoTextDirection', 'vcRefAnnoZone',
-                'vcRefLengthF', 'vcRefMagnitudeF', 'vcScalarFieldData',
-                'vcScalarMissingValColor', 'vcScalarValueFormat',
-                'vcScalarValueScaleFactorF', 'vcScalarValueScaleValueF',
-                'vcScalarValueScalingMode', 'vcSpanLevelPalette', 'vcUseRefAnnoRes',
-                'vcUseScalarArray', 'vcVectorDrawOrder', 'vcVectorFieldData',
-                'vcWindBarbCalmCircleSizeF', 'vcWindBarbColor',
-                'vcWindBarbLineThicknessF', 'vcWindBarbScaleFactorF',
-                'vcWindBarbTickAngleF', 'vcWindBarbTickLengthF',
-                'vcWindBarbTickSpacingF', 'vcZeroFLabelAngleF',
-                'vcZeroFLabelBackgroundColor', 'vcZeroFLabelConstantSpacingF',
-                'vcZeroFLabelFont', 'vcZeroFLabelFontAspectF',
-                'vcZeroFLabelFontColor', 'vcZeroFLabelFontHeightF',
-                'vcZeroFLabelFontQuality', 'vcZeroFLabelFontThicknessF',
-                'vcZeroFLabelFuncCode', 'vcZeroFLabelJust', 'vcZeroFLabelOn',
-                'vcZeroFLabelOrthogonalPosF', 'vcZeroFLabelParallelPosF',
-                'vcZeroFLabelPerimColor', 'vcZeroFLabelPerimOn',
-                'vcZeroFLabelPerimSpaceF', 'vcZeroFLabelPerimThicknessF',
-                'vcZeroFLabelSide', 'vcZeroFLabelString', 'vcZeroFLabelTextDirection',
-                'vcZeroFLabelZone', 'vfCopyData', 'vfDataArray',
-                'vfExchangeDimensions', 'vfExchangeUVData', 'vfMagMaxV', 'vfMagMinV',
-                'vfMissingUValueV', 'vfMissingVValueV', 'vfPolarData',
-                'vfSingleMissingValue', 'vfUDataArray', 'vfUMaxV', 'vfUMinV',
-                'vfVDataArray', 'vfVMaxV', 'vfVMinV', 'vfXArray', 'vfXCActualEndF',
-                'vfXCActualStartF', 'vfXCEndIndex', 'vfXCEndSubsetV', 'vfXCEndV',
-                'vfXCStartIndex', 'vfXCStartSubsetV', 'vfXCStartV', 'vfXCStride',
-                'vfYArray', 'vfYCActualEndF', 'vfYCActualStartF', 'vfYCEndIndex',
-                'vfYCEndSubsetV', 'vfYCEndV', 'vfYCStartIndex', 'vfYCStartSubsetV',
-                'vfYCStartV', 'vfYCStride', 'vpAnnoManagerId', 'vpClipOn',
-                'vpHeightF', 'vpKeepAspect', 'vpOn', 'vpUseSegments', 'vpWidthF',
-                'vpXF', 'vpYF', 'wkAntiAlias', 'wkBackgroundColor', 'wkBackgroundOpacityF',
-                'wkColorMapLen', 'wkColorMap', 'wkColorModel', 'wkDashTableLength',
-                'wkDefGraphicStyleId', 'wkDeviceLowerX', 'wkDeviceLowerY',
-                'wkDeviceUpperX', 'wkDeviceUpperY', 'wkFileName', 'wkFillTableLength',
-                'wkForegroundColor', 'wkFormat', 'wkFullBackground', 'wkGksWorkId',
-                'wkHeight', 'wkMarkerTableLength', 'wkMetaName', 'wkOrientation',
-                'wkPDFFileName', 'wkPDFFormat', 'wkPDFResolution', 'wkPSFileName',
-                'wkPSFormat', 'wkPSResolution', 'wkPaperHeightF', 'wkPaperSize',
-                'wkPaperWidthF', 'wkPause', 'wkTopLevelViews', 'wkViews',
-                'wkVisualType', 'wkWidth', 'wkWindowId', 'wkXColorMode', 'wsCurrentSize',
-                'wsMaximumSize', 'wsThresholdSize', 'xyComputeXMax',
-                'xyComputeXMin', 'xyComputeYMax', 'xyComputeYMin', 'xyCoordData',
-                'xyCoordDataSpec', 'xyCurveDrawOrder', 'xyDashPattern',
-                'xyDashPatterns', 'xyExplicitLabels', 'xyExplicitLegendLabels',
-                'xyLabelMode', 'xyLineColor', 'xyLineColors', 'xyLineDashSegLenF',
-                'xyLineLabelConstantSpacingF', 'xyLineLabelFont',
-                'xyLineLabelFontAspectF', 'xyLineLabelFontColor',
-                'xyLineLabelFontColors', 'xyLineLabelFontHeightF',
-                'xyLineLabelFontQuality', 'xyLineLabelFontThicknessF',
-                'xyLineLabelFuncCode', 'xyLineThicknessF', 'xyLineThicknesses',
-                'xyMarkLineMode', 'xyMarkLineModes', 'xyMarker', 'xyMarkerColor',
-                'xyMarkerColors', 'xyMarkerSizeF', 'xyMarkerSizes',
-                'xyMarkerThicknessF', 'xyMarkerThicknesses', 'xyMarkers',
-                'xyMonoDashPattern', 'xyMonoLineColor', 'xyMonoLineLabelFontColor',
-                'xyMonoLineThickness', 'xyMonoMarkLineMode', 'xyMonoMarker',
-                'xyMonoMarkerColor', 'xyMonoMarkerSize', 'xyMonoMarkerThickness',
-                'xyXIrrTensionF', 'xyXIrregularPoints', 'xyXStyle', 'xyYIrrTensionF',
-                'xyYIrregularPoints', 'xyYStyle'), prefix=r'\b'),
-             Name.Builtin),
-
-            # Booleans
-            (r'\.(True|False)\.', Name.Builtin),
-            # Comparing Operators
-            (r'\.(eq|ne|lt|le|gt|ge|not|and|or|xor)\.', Operator.Word),
-        ],
-
-        'strings': [
-            (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
-        ],
-
-        'nums': [
-            (r'\d+(?![.e])(_[a-z]\w+)?', Number.Integer),
-            (r'[+-]?\d*\.\d+(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float),
-            (r'[+-]?\d+\.\d*(e[-+]?\d+)?(_[a-z]\w+)?', Number.Float),
-        ],
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/nimrod.py b/venv/lib/python3.11/site-packages/pygments/lexers/nimrod.py
deleted file mode 100644
index 11f5b93..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/nimrod.py
+++ /dev/null
@@ -1,200 +0,0 @@
-"""
-    pygments.lexers.nimrod
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexer for the Nim language (formerly known as Nimrod).
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, default, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation, Error
-
-__all__ = ['NimrodLexer']
-
-
-class NimrodLexer(RegexLexer):
-    """
-    For Nim source code.
-
-    .. versionadded:: 1.5
-    """
-
-    name = 'Nimrod'
-    url = 'http://nim-lang.org/'
-    aliases = ['nimrod', 'nim']
-    filenames = ['*.nim', '*.nimrod']
-    mimetypes = ['text/x-nim']
-
-    flags = re.MULTILINE | re.IGNORECASE
-
-    def underscorize(words):
-        newWords = []
-        new = []
-        for word in words:
-            for ch in word:
-                new.append(ch)
-                new.append("_?")
-            newWords.append(''.join(new))
-            new = []
-        return "|".join(newWords)
-
-    keywords = [
-        'addr', 'and', 'as', 'asm', 'bind', 'block', 'break', 'case',
-        'cast', 'concept', 'const', 'continue', 'converter', 'defer', 'discard',
-        'distinct', 'div', 'do', 'elif', 'else', 'end', 'enum', 'except',
-        'export', 'finally', 'for', 'if', 'in', 'yield', 'interface',
-        'is', 'isnot', 'iterator', 'let', 'mixin', 'mod',
-        'not', 'notin', 'object', 'of', 'or', 'out', 'ptr', 'raise',
-        'ref', 'return', 'shl', 'shr', 'static', 'try',
-        'tuple', 'type', 'using', 'when', 'while', 'xor'
-    ]
-
-    keywordsPseudo = [
-        'nil', 'true', 'false'
-    ]
-
-    opWords = [
-        'and', 'or', 'not', 'xor', 'shl', 'shr', 'div', 'mod', 'in',
-        'notin', 'is', 'isnot'
-    ]
-
-    types = [
-        'int', 'int8', 'int16', 'int32', 'int64', 'float', 'float32', 'float64',
-        'bool', 'char', 'range', 'array', 'seq', 'set', 'string'
-    ]
-
-    tokens = {
-        'root': [
-            # Comments
-            (r'##\[', String.Doc, 'doccomment'),
-            (r'##.*$', String.Doc),
-            (r'#\[', Comment.Multiline, 'comment'),
-            (r'#.*$', Comment),
-
-            # Pragmas
-            (r'\{\.', String.Other, 'pragma'),
-
-            # Operators
-            (r'[*=><+\-/@$~&%!?|\\\[\]]', Operator),
-            (r'\.\.|\.|,|\[\.|\.\]|\{\.|\.\}|\(\.|\.\)|\{|\}|\(|\)|:|\^|`|;',
-             Punctuation),
-
-            # Case statement branch
-            (r'(\n\s*)(of)(\s)', bygroups(Text.Whitespace, Keyword,
-                                          Text.Whitespace), 'casebranch'),
-
-            # Strings
-            (r'(?:[\w]+)"', String, 'rdqs'),
-            (r'"""', String.Double, 'tdqs'),
-            ('"', String, 'dqs'),
-
-            # Char
-            ("'", String.Char, 'chars'),
-
-            # Keywords
-            (r'(%s)\b' % underscorize(opWords), Operator.Word),
-            (r'(proc|func|method|macro|template)(\s)(?![(\[\]])',
-             bygroups(Keyword, Text.Whitespace), 'funcname'),
-            (r'(%s)\b' % underscorize(keywords), Keyword),
-            (r'(%s)\b' % underscorize(['from', 'import', 'include', 'export']),
-             Keyword.Namespace),
-            (r'(v_?a_?r)\b', Keyword.Declaration),
-            (r'(%s)\b' % underscorize(types), Name.Builtin),
-            (r'(%s)\b' % underscorize(keywordsPseudo), Keyword.Pseudo),
-
-            # Identifiers
-            (r'\b((?![_\d])\w)(((?!_)\w)|(_(?!_)\w))*', Name),
-
-            # Numbers
-            (r'[0-9][0-9_]*(?=([e.]|\'f(32|64)))',
-             Number.Float, ('float-suffix', 'float-number')),
-            (r'0x[a-f0-9][a-f0-9_]*', Number.Hex, 'int-suffix'),
-            (r'0b[01][01_]*', Number.Bin, 'int-suffix'),
-            (r'0o[0-7][0-7_]*', Number.Oct, 'int-suffix'),
-            (r'[0-9][0-9_]*', Number.Integer, 'int-suffix'),
-
-            # Whitespace
-            (r'\s+', Text.Whitespace),
-            (r'.+$', Error),
-        ],
-        'chars': [
-            (r'\\([\\abcefnrtvl"\']|x[a-f0-9]{2}|[0-9]{1,3})', String.Escape),
-            (r"'", String.Char, '#pop'),
-            (r".", String.Char)
-        ],
-        'strings': [
-            (r'(?|>=|>>|>|<=|<<|<|\+|-|=|/|\*|%|\+=|-=|!|@', Operator),
-            (r'\(|\)|\[|\]|,|\.\.\.|\.\.|\.|::|:', Punctuation),
-            (r'`\{[^`]*`\}', Text),  # Extern blocks won't be Lexed by Nit
-            (r'[\r\n\t ]+', Text),
-        ],
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/nix.py b/venv/lib/python3.11/site-packages/pygments/lexers/nix.py
deleted file mode 100644
index 7ab59bb..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/nix.py
+++ /dev/null
@@ -1,145 +0,0 @@
-"""
-    pygments.lexers.nix
-    ~~~~~~~~~~~~~~~~~~~
-
-    Lexers for the NixOS Nix language.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation, Literal
-
-__all__ = ['NixLexer']
-
-
-class NixLexer(RegexLexer):
-    """
-    For the Nix language.
-
-    .. versionadded:: 2.0
-    """
-
-    name = 'Nix'
-    url = 'http://nixos.org/nix/'
-    aliases = ['nixos', 'nix']
-    filenames = ['*.nix']
-    mimetypes = ['text/x-nix']
-
-    keywords = ['rec', 'with', 'let', 'in', 'inherit', 'assert', 'if',
-                'else', 'then', '...']
-    builtins = ['import', 'abort', 'baseNameOf', 'dirOf', 'isNull', 'builtins',
-                'map', 'removeAttrs', 'throw', 'toString', 'derivation']
-    operators = ['++', '+', '?', '.', '!', '//', '==', '/',
-                 '!=', '&&', '||', '->', '=', '<', '>', '*', '-']
-
-    punctuations = ["(", ")", "[", "]", ";", "{", "}", ":", ",", "@"]
-
-    tokens = {
-        'root': [
-            # comments starting with #
-            (r'#.*$', Comment.Single),
-
-            # multiline comments
-            (r'/\*', Comment.Multiline, 'comment'),
-
-            # whitespace
-            (r'\s+', Text),
-
-            # keywords
-            ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in keywords), Keyword),
-
-            # highlight the builtins
-            ('(%s)' % '|'.join(re.escape(entry) + '\\b' for entry in builtins),
-             Name.Builtin),
-
-            (r'\b(true|false|null)\b', Name.Constant),
-
-            # floats
-            (r'-?(\d+\.\d*|\.\d+)([eE][-+]?\d+)?', Number.Float),
-
-            # integers
-            (r'-?[0-9]+', Number.Integer),
-
-            # paths
-            (r'[\w.+-]*(\/[\w.+-]+)+', Literal),
-            (r'~(\/[\w.+-]+)+', Literal),
-            (r'\<[\w.+-]+(\/[\w.+-]+)*\>', Literal),
-
-            # operators
-            ('(%s)' % '|'.join(re.escape(entry) for entry in operators),
-             Operator),
-
-            # word operators
-            (r'\b(or|and)\b', Operator.Word),
-
-            (r'\{', Punctuation, 'block'),
-
-            # punctuations
-            ('(%s)' % '|'.join(re.escape(entry) for entry in punctuations), Punctuation),
-
-            # strings
-            (r'"', String.Double, 'doublequote'),
-            (r"''", String.Multiline, 'multiline'),
-
-            # urls
-            (r'[a-zA-Z][a-zA-Z0-9\+\-\.]*\:[\w%/?:@&=+$,\\.!~*\'-]+', Literal),
-
-            # names of variables
-            (r'[\w-]+(?=\s*=)', String.Symbol),
-            (r'[a-zA-Z_][\w\'-]*', Text),
-
-            (r"\$\{", String.Interpol, 'antiquote'),
-        ],
-        'comment': [
-            (r'[^/*]+', Comment.Multiline),
-            (r'/\*', Comment.Multiline, '#push'),
-            (r'\*/', Comment.Multiline, '#pop'),
-            (r'[*/]', Comment.Multiline),
-        ],
-        'multiline': [
-            (r"''(\$|'|\\n|\\r|\\t|\\)", String.Escape),
-            (r"''", String.Multiline, '#pop'),
-            (r'\$\{', String.Interpol, 'antiquote'),
-            (r"[^'\$]+", String.Multiline),
-            (r"\$[^\{']", String.Multiline),
-            (r"'[^']", String.Multiline),
-            (r"\$(?=')", String.Multiline),
-        ],
-        'doublequote': [
-            (r'\\(\\|"|\$|n)', String.Escape),
-            (r'"', String.Double, '#pop'),
-            (r'\$\{', String.Interpol, 'antiquote'),
-            (r'[^"\\\$]+', String.Double),
-            (r'\$[^\{"]', String.Double),
-            (r'\$(?=")', String.Double),
-            (r'\\', String.Double),
-        ],
-        'antiquote': [
-            (r"\}", String.Interpol, '#pop'),
-            # TODO: we should probably escape also here ''${ \${
-            (r"\$\{", String.Interpol, '#push'),
-            include('root'),
-        ],
-        'block': [
-            (r"\}", Punctuation, '#pop'),
-            include('root'),
-        ],
-    }
-
-    def analyse_text(text):
-        rv = 0.0
-        # TODO: let/in
-        if re.search(r'import.+?<[^>]+>', text):
-            rv += 0.4
-        if re.search(r'mkDerivation\s+(\(|\{|rec)', text):
-            rv += 0.4
-        if re.search(r'=\s+mkIf\s+', text):
-            rv += 0.4
-        if re.search(r'\{[a-zA-Z,\s]+\}:', text):
-            rv += 0.1
-        return rv
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/oberon.py b/venv/lib/python3.11/site-packages/pygments/lexers/oberon.py
deleted file mode 100644
index 3aaa763..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/oberon.py
+++ /dev/null
@@ -1,120 +0,0 @@
-"""
-    pygments.lexers.oberon
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for Oberon family languages.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation
-
-__all__ = ['ComponentPascalLexer']
-
-
-class ComponentPascalLexer(RegexLexer):
-    """
-    For Component Pascal source code.
-
-    .. versionadded:: 2.1
-    """
-    name = 'Component Pascal'
-    aliases = ['componentpascal', 'cp']
-    filenames = ['*.cp', '*.cps']
-    mimetypes = ['text/x-component-pascal']
-
-    flags = re.MULTILINE | re.DOTALL
-
-    tokens = {
-        'root': [
-            include('whitespace'),
-            include('comments'),
-            include('punctuation'),
-            include('numliterals'),
-            include('strings'),
-            include('operators'),
-            include('builtins'),
-            include('identifiers'),
-        ],
-        'whitespace': [
-            (r'\n+', Text),  # blank lines
-            (r'\s+', Text),  # whitespace
-        ],
-        'comments': [
-            (r'\(\*([^$].*?)\*\)', Comment.Multiline),
-            # TODO: nested comments (* (* ... *) ... (* ... *) *) not supported!
-        ],
-        'punctuation': [
-            (r'[()\[\]{},.:;|]', Punctuation),
-        ],
-        'numliterals': [
-            (r'[0-9A-F]+X\b', Number.Hex),                 # char code
-            (r'[0-9A-F]+[HL]\b', Number.Hex),              # hexadecimal number
-            (r'[0-9]+\.[0-9]+E[+-][0-9]+', Number.Float),  # real number
-            (r'[0-9]+\.[0-9]+', Number.Float),             # real number
-            (r'[0-9]+', Number.Integer),                   # decimal whole number
-        ],
-        'strings': [
-            (r"'[^\n']*'", String),  # single quoted string
-            (r'"[^\n"]*"', String),  # double quoted string
-        ],
-        'operators': [
-            # Arithmetic Operators
-            (r'[+-]', Operator),
-            (r'[*/]', Operator),
-            # Relational Operators
-            (r'[=#<>]', Operator),
-            # Dereferencing Operator
-            (r'\^', Operator),
-            # Logical AND Operator
-            (r'&', Operator),
-            # Logical NOT Operator
-            (r'~', Operator),
-            # Assignment Symbol
-            (r':=', Operator),
-            # Range Constructor
-            (r'\.\.', Operator),
-            (r'\$', Operator),
-        ],
-        'identifiers': [
-            (r'([a-zA-Z_$][\w$]*)', Name),
-        ],
-        'builtins': [
-            (words((
-                'ANYPTR', 'ANYREC', 'BOOLEAN', 'BYTE', 'CHAR', 'INTEGER', 'LONGINT',
-                'REAL', 'SET', 'SHORTCHAR', 'SHORTINT', 'SHORTREAL'
-                ), suffix=r'\b'), Keyword.Type),
-            (words((
-                'ABS', 'ABSTRACT', 'ARRAY', 'ASH', 'ASSERT', 'BEGIN', 'BITS', 'BY',
-                'CAP', 'CASE', 'CHR', 'CLOSE', 'CONST', 'DEC', 'DIV', 'DO', 'ELSE',
-                'ELSIF', 'EMPTY', 'END', 'ENTIER', 'EXCL', 'EXIT', 'EXTENSIBLE', 'FOR',
-                'HALT', 'IF', 'IMPORT', 'IN', 'INC', 'INCL', 'IS', 'LEN', 'LIMITED',
-                'LONG', 'LOOP', 'MAX', 'MIN', 'MOD', 'MODULE', 'NEW', 'ODD', 'OF',
-                'OR', 'ORD', 'OUT', 'POINTER', 'PROCEDURE', 'RECORD', 'REPEAT', 'RETURN',
-                'SHORT', 'SHORTCHAR', 'SHORTINT', 'SIZE', 'THEN', 'TYPE', 'TO', 'UNTIL',
-                'VAR', 'WHILE', 'WITH'
-                ), suffix=r'\b'), Keyword.Reserved),
-            (r'(TRUE|FALSE|NIL|INF)\b', Keyword.Constant),
-        ]
-    }
-
-    def analyse_text(text):
-        """The only other lexer using .cp is the C++ one, so we check if for
-        a few common Pascal keywords here. Those are unfortunately quite
-        common across various business languages as well."""
-        result = 0
-        if 'BEGIN' in text:
-            result += 0.01
-        if 'END' in text:
-            result += 0.01
-        if 'PROCEDURE' in text:
-            result += 0.01
-        if 'END' in text:
-            result += 0.01
-
-        return result
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/objective.py b/venv/lib/python3.11/site-packages/pygments/lexers/objective.py
deleted file mode 100644
index 2e4332a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/objective.py
+++ /dev/null
@@ -1,505 +0,0 @@
-"""
-    pygments.lexers.objective
-    ~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for Objective-C family languages.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, this, words, \
-    inherit, default
-from pygments.token import Text, Keyword, Name, String, Operator, \
-    Number, Punctuation, Literal, Comment
-
-from pygments.lexers.c_cpp import CLexer, CppLexer
-
-__all__ = ['ObjectiveCLexer', 'ObjectiveCppLexer', 'LogosLexer', 'SwiftLexer']
-
-
-def objective(baselexer):
-    """
-    Generate a subclass of baselexer that accepts the Objective-C syntax
-    extensions.
-    """
-
-    # Have to be careful not to accidentally match JavaDoc/Doxygen syntax here,
-    # since that's quite common in ordinary C/C++ files.  It's OK to match
-    # JavaDoc/Doxygen keywords that only apply to Objective-C, mind.
-    #
-    # The upshot of this is that we CANNOT match @class or @interface
-    _oc_keywords = re.compile(r'@(?:end|implementation|protocol)')
-
-    # Matches [ ? identifier  ( identifier ? ] |  identifier? : )
-    # (note the identifier is *optional* when there is a ':'!)
-    _oc_message = re.compile(r'\[\s*[a-zA-Z_]\w*\s+'
-                             r'(?:[a-zA-Z_]\w*\s*\]|'
-                             r'(?:[a-zA-Z_]\w*)?:)')
-
-    class GeneratedObjectiveCVariant(baselexer):
-        """
-        Implements Objective-C syntax on top of an existing C family lexer.
-        """
-
-        tokens = {
-            'statements': [
-                (r'@"', String, 'string'),
-                (r'@(YES|NO)', Number),
-                (r"@'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
-                (r'@(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[lL]?', Number.Float),
-                (r'@(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
-                (r'@0x[0-9a-fA-F]+[Ll]?', Number.Hex),
-                (r'@0[0-7]+[Ll]?', Number.Oct),
-                (r'@\d+[Ll]?', Number.Integer),
-                (r'@\(', Literal, 'literal_number'),
-                (r'@\[', Literal, 'literal_array'),
-                (r'@\{', Literal, 'literal_dictionary'),
-                (words((
-                    '@selector', '@private', '@protected', '@public', '@encode',
-                    '@synchronized', '@try', '@throw', '@catch', '@finally',
-                    '@end', '@property', '@synthesize', '__bridge', '__bridge_transfer',
-                    '__autoreleasing', '__block', '__weak', '__strong', 'weak', 'strong',
-                    'copy', 'retain', 'assign', 'unsafe_unretained', 'atomic', 'nonatomic',
-                    'readonly', 'readwrite', 'setter', 'getter', 'typeof', 'in',
-                    'out', 'inout', 'release', 'class', '@dynamic', '@optional',
-                    '@required', '@autoreleasepool', '@import'), suffix=r'\b'),
-                 Keyword),
-                (words(('id', 'instancetype', 'Class', 'IMP', 'SEL', 'BOOL',
-                        'IBOutlet', 'IBAction', 'unichar'), suffix=r'\b'),
-                 Keyword.Type),
-                (r'@(true|false|YES|NO)\n', Name.Builtin),
-                (r'(YES|NO|nil|self|super)\b', Name.Builtin),
-                # Carbon types
-                (r'(Boolean|UInt8|SInt8|UInt16|SInt16|UInt32|SInt32)\b', Keyword.Type),
-                # Carbon built-ins
-                (r'(TRUE|FALSE)\b', Name.Builtin),
-                (r'(@interface|@implementation)(\s+)', bygroups(Keyword, Text),
-                 ('#pop', 'oc_classname')),
-                (r'(@class|@protocol)(\s+)', bygroups(Keyword, Text),
-                 ('#pop', 'oc_forward_classname')),
-                # @ can also prefix other expressions like @{...} or @(...)
-                (r'@', Punctuation),
-                inherit,
-            ],
-            'oc_classname': [
-                # interface definition that inherits
-                (r'([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?(\s*)(\{)',
-                 bygroups(Name.Class, Text, Name.Class, Text, Punctuation),
-                 ('#pop', 'oc_ivars')),
-                (r'([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?',
-                 bygroups(Name.Class, Text, Name.Class), '#pop'),
-                # interface definition for a category
-                (r'([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))(\s*)(\{)',
-                 bygroups(Name.Class, Text, Name.Label, Text, Punctuation),
-                 ('#pop', 'oc_ivars')),
-                (r'([a-zA-Z$_][\w$]*)(\s*)(\([a-zA-Z$_][\w$]*\))',
-                 bygroups(Name.Class, Text, Name.Label), '#pop'),
-                # simple interface / implementation
-                (r'([a-zA-Z$_][\w$]*)(\s*)(\{)',
-                 bygroups(Name.Class, Text, Punctuation), ('#pop', 'oc_ivars')),
-                (r'([a-zA-Z$_][\w$]*)', Name.Class, '#pop')
-            ],
-            'oc_forward_classname': [
-                (r'([a-zA-Z$_][\w$]*)(\s*,\s*)',
-                 bygroups(Name.Class, Text), 'oc_forward_classname'),
-                (r'([a-zA-Z$_][\w$]*)(\s*;?)',
-                 bygroups(Name.Class, Text), '#pop')
-            ],
-            'oc_ivars': [
-                include('whitespace'),
-                include('statements'),
-                (';', Punctuation),
-                (r'\{', Punctuation, '#push'),
-                (r'\}', Punctuation, '#pop'),
-            ],
-            'root': [
-                # methods
-                (r'^([-+])(\s*)'                         # method marker
-                 r'(\(.*?\))?(\s*)'                      # return type
-                 r'([a-zA-Z$_][\w$]*:?)',        # begin of method name
-                 bygroups(Punctuation, Text, using(this),
-                          Text, Name.Function),
-                 'method'),
-                inherit,
-            ],
-            'method': [
-                include('whitespace'),
-                # TODO unsure if ellipses are allowed elsewhere, see
-                # discussion in Issue 789
-                (r',', Punctuation),
-                (r'\.\.\.', Punctuation),
-                (r'(\(.*?\))(\s*)([a-zA-Z$_][\w$]*)',
-                 bygroups(using(this), Text, Name.Variable)),
-                (r'[a-zA-Z$_][\w$]*:', Name.Function),
-                (';', Punctuation, '#pop'),
-                (r'\{', Punctuation, 'function'),
-                default('#pop'),
-            ],
-            'literal_number': [
-                (r'\(', Punctuation, 'literal_number_inner'),
-                (r'\)', Literal, '#pop'),
-                include('statement'),
-            ],
-            'literal_number_inner': [
-                (r'\(', Punctuation, '#push'),
-                (r'\)', Punctuation, '#pop'),
-                include('statement'),
-            ],
-            'literal_array': [
-                (r'\[', Punctuation, 'literal_array_inner'),
-                (r'\]', Literal, '#pop'),
-                include('statement'),
-            ],
-            'literal_array_inner': [
-                (r'\[', Punctuation, '#push'),
-                (r'\]', Punctuation, '#pop'),
-                include('statement'),
-            ],
-            'literal_dictionary': [
-                (r'\}', Literal, '#pop'),
-                include('statement'),
-            ],
-        }
-
-        def analyse_text(text):
-            if _oc_keywords.search(text):
-                return 1.0
-            elif '@"' in text:  # strings
-                return 0.8
-            elif re.search('@[0-9]+', text):
-                return 0.7
-            elif _oc_message.search(text):
-                return 0.8
-            return 0
-
-        def get_tokens_unprocessed(self, text, stack=('root',)):
-            from pygments.lexers._cocoa_builtins import COCOA_INTERFACES, \
-                COCOA_PROTOCOLS, COCOA_PRIMITIVES
-
-            for index, token, value in \
-                    baselexer.get_tokens_unprocessed(self, text, stack):
-                if token is Name or token is Name.Class:
-                    if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \
-                       or value in COCOA_PRIMITIVES:
-                        token = Name.Builtin.Pseudo
-
-                yield index, token, value
-
-    return GeneratedObjectiveCVariant
-
-
-class ObjectiveCLexer(objective(CLexer)):
-    """
-    For Objective-C source code with preprocessor directives.
-    """
-
-    name = 'Objective-C'
-    url = 'https://developer.apple.com/library/archive/documentation/Cocoa/Conceptual/ProgrammingWithObjectiveC/Introduction/Introduction.html'
-    aliases = ['objective-c', 'objectivec', 'obj-c', 'objc']
-    filenames = ['*.m', '*.h']
-    mimetypes = ['text/x-objective-c']
-    priority = 0.05    # Lower than C
-
-
-class ObjectiveCppLexer(objective(CppLexer)):
-    """
-    For Objective-C++ source code with preprocessor directives.
-    """
-
-    name = 'Objective-C++'
-    aliases = ['objective-c++', 'objectivec++', 'obj-c++', 'objc++']
-    filenames = ['*.mm', '*.hh']
-    mimetypes = ['text/x-objective-c++']
-    priority = 0.05    # Lower than C++
-
-
-class LogosLexer(ObjectiveCppLexer):
-    """
-    For Logos + Objective-C source code with preprocessor directives.
-
-    .. versionadded:: 1.6
-    """
-
-    name = 'Logos'
-    aliases = ['logos']
-    filenames = ['*.x', '*.xi', '*.xm', '*.xmi']
-    mimetypes = ['text/x-logos']
-    priority = 0.25
-
-    tokens = {
-        'statements': [
-            (r'(%orig|%log)\b', Keyword),
-            (r'(%c)\b(\()(\s*)([a-zA-Z$_][\w$]*)(\s*)(\))',
-             bygroups(Keyword, Punctuation, Text, Name.Class, Text, Punctuation)),
-            (r'(%init)\b(\()',
-             bygroups(Keyword, Punctuation), 'logos_init_directive'),
-            (r'(%init)(?=\s*;)', bygroups(Keyword)),
-            (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)',
-             bygroups(Keyword, Text, Name.Class), '#pop'),
-            (r'(%subclass)(\s+)', bygroups(Keyword, Text),
-             ('#pop', 'logos_classname')),
-            inherit,
-        ],
-        'logos_init_directive': [
-            (r'\s+', Text),
-            (',', Punctuation, ('logos_init_directive', '#pop')),
-            (r'([a-zA-Z$_][\w$]*)(\s*)(=)(\s*)([^);]*)',
-             bygroups(Name.Class, Text, Punctuation, Text, Text)),
-            (r'([a-zA-Z$_][\w$]*)', Name.Class),
-            (r'\)', Punctuation, '#pop'),
-        ],
-        'logos_classname': [
-            (r'([a-zA-Z$_][\w$]*)(\s*:\s*)([a-zA-Z$_][\w$]*)?',
-             bygroups(Name.Class, Text, Name.Class), '#pop'),
-            (r'([a-zA-Z$_][\w$]*)', Name.Class, '#pop')
-        ],
-        'root': [
-            (r'(%subclass)(\s+)', bygroups(Keyword, Text),
-             'logos_classname'),
-            (r'(%hook|%group)(\s+)([a-zA-Z$_][\w$]+)',
-             bygroups(Keyword, Text, Name.Class)),
-            (r'(%config)(\s*\(\s*)(\w+)(\s*=)(.*?)(\)\s*)',
-             bygroups(Keyword, Text, Name.Variable, Text, String, Text)),
-            (r'(%ctor)(\s*)(\{)', bygroups(Keyword, Text, Punctuation),
-             'function'),
-            (r'(%new)(\s*)(\()(.*?)(\))',
-             bygroups(Keyword, Text, Keyword, String, Keyword)),
-            (r'(\s*)(%end)(\s*)', bygroups(Text, Keyword, Text)),
-            inherit,
-        ],
-    }
-
-    _logos_keywords = re.compile(r'%(?:hook|ctor|init|c\()')
-
-    def analyse_text(text):
-        if LogosLexer._logos_keywords.search(text):
-            return 1.0
-        return 0
-
-
-class SwiftLexer(RegexLexer):
-    """
-    For Swift source.
-
-    .. versionadded:: 2.0
-    """
-    name = 'Swift'
-    url = 'https://www.swift.org/'
-    filenames = ['*.swift']
-    aliases = ['swift']
-    mimetypes = ['text/x-swift']
-
-    tokens = {
-        'root': [
-            # Whitespace and Comments
-            (r'\n', Text),
-            (r'\s+', Text),
-            (r'//', Comment.Single, 'comment-single'),
-            (r'/\*', Comment.Multiline, 'comment-multi'),
-            (r'#(if|elseif|else|endif|available)\b', Comment.Preproc, 'preproc'),
-
-            # Keywords
-            include('keywords'),
-
-            # Global Types
-            (words((
-                'Array', 'AutoreleasingUnsafeMutablePointer', 'BidirectionalReverseView',
-                'Bit', 'Bool', 'CFunctionPointer', 'COpaquePointer', 'CVaListPointer',
-                'Character', 'ClosedInterval', 'CollectionOfOne', 'ContiguousArray',
-                'Dictionary', 'DictionaryGenerator', 'DictionaryIndex', 'Double',
-                'EmptyCollection', 'EmptyGenerator', 'EnumerateGenerator',
-                'EnumerateSequence', 'FilterCollectionView',
-                'FilterCollectionViewIndex', 'FilterGenerator', 'FilterSequenceView',
-                'Float', 'Float80', 'FloatingPointClassification', 'GeneratorOf',
-                'GeneratorOfOne', 'GeneratorSequence', 'HalfOpenInterval', 'HeapBuffer',
-                'HeapBufferStorage', 'ImplicitlyUnwrappedOptional', 'IndexingGenerator',
-                'Int', 'Int16', 'Int32', 'Int64', 'Int8', 'LazyBidirectionalCollection',
-                'LazyForwardCollection', 'LazyRandomAccessCollection',
-                'LazySequence', 'MapCollectionView', 'MapSequenceGenerator',
-                'MapSequenceView', 'MirrorDisposition', 'ObjectIdentifier', 'OnHeap',
-                'Optional', 'PermutationGenerator', 'QuickLookObject',
-                'RandomAccessReverseView', 'Range', 'RangeGenerator', 'RawByte', 'Repeat',
-                'ReverseBidirectionalIndex', 'ReverseRandomAccessIndex', 'SequenceOf',
-                'SinkOf', 'Slice', 'StaticString', 'StrideThrough', 'StrideThroughGenerator',
-                'StrideTo', 'StrideToGenerator', 'String', 'UInt', 'UInt16', 'UInt32',
-                'UInt64', 'UInt8', 'UTF16', 'UTF32', 'UTF8', 'UnicodeDecodingResult',
-                'UnicodeScalar', 'Unmanaged', 'UnsafeBufferPointer',
-                'UnsafeBufferPointerGenerator', 'UnsafeMutableBufferPointer',
-                'UnsafeMutablePointer', 'UnsafePointer', 'Zip2', 'ZipGenerator2',
-                # Protocols
-                'AbsoluteValuable', 'AnyObject', 'ArrayLiteralConvertible',
-                'BidirectionalIndexType', 'BitwiseOperationsType',
-                'BooleanLiteralConvertible', 'BooleanType', 'CVarArgType',
-                'CollectionType', 'Comparable', 'DebugPrintable',
-                'DictionaryLiteralConvertible', 'Equatable',
-                'ExtendedGraphemeClusterLiteralConvertible',
-                'ExtensibleCollectionType', 'FloatLiteralConvertible',
-                'FloatingPointType', 'ForwardIndexType', 'GeneratorType', 'Hashable',
-                'IntegerArithmeticType', 'IntegerLiteralConvertible', 'IntegerType',
-                'IntervalType', 'MirrorType', 'MutableCollectionType', 'MutableSliceable',
-                'NilLiteralConvertible', 'OutputStreamType', 'Printable',
-                'RandomAccessIndexType', 'RangeReplaceableCollectionType',
-                'RawOptionSetType', 'RawRepresentable', 'Reflectable', 'SequenceType',
-                'SignedIntegerType', 'SignedNumberType', 'SinkType', 'Sliceable',
-                'Streamable', 'Strideable', 'StringInterpolationConvertible',
-                'StringLiteralConvertible', 'UnicodeCodecType',
-                'UnicodeScalarLiteralConvertible', 'UnsignedIntegerType',
-                '_ArrayBufferType', '_BidirectionalIndexType', '_CocoaStringType',
-                '_CollectionType', '_Comparable', '_ExtensibleCollectionType',
-                '_ForwardIndexType', '_Incrementable', '_IntegerArithmeticType',
-                '_IntegerType', '_ObjectiveCBridgeable', '_RandomAccessIndexType',
-                '_RawOptionSetType', '_SequenceType', '_Sequence_Type',
-                '_SignedIntegerType', '_SignedNumberType', '_Sliceable', '_Strideable',
-                '_SwiftNSArrayRequiredOverridesType', '_SwiftNSArrayType',
-                '_SwiftNSCopyingType', '_SwiftNSDictionaryRequiredOverridesType',
-                '_SwiftNSDictionaryType', '_SwiftNSEnumeratorType',
-                '_SwiftNSFastEnumerationType', '_SwiftNSStringRequiredOverridesType',
-                '_SwiftNSStringType', '_UnsignedIntegerType',
-                # Variables
-                'C_ARGC', 'C_ARGV', 'Process',
-                # Typealiases
-                'Any', 'AnyClass', 'BooleanLiteralType', 'CBool', 'CChar', 'CChar16',
-                'CChar32', 'CDouble', 'CFloat', 'CInt', 'CLong', 'CLongLong', 'CShort',
-                'CSignedChar', 'CUnsignedInt', 'CUnsignedLong', 'CUnsignedShort',
-                'CWideChar', 'ExtendedGraphemeClusterType', 'Float32', 'Float64',
-                'FloatLiteralType', 'IntMax', 'IntegerLiteralType', 'StringLiteralType',
-                'UIntMax', 'UWord', 'UnicodeScalarType', 'Void', 'Word',
-                # Foundation/Cocoa
-                'NSErrorPointer', 'NSObjectProtocol', 'Selector'), suffix=r'\b'),
-             Name.Builtin),
-            # Functions
-            (words((
-                'abs', 'advance', 'alignof', 'alignofValue', 'assert', 'assertionFailure',
-                'contains', 'count', 'countElements', 'debugPrint', 'debugPrintln',
-                'distance', 'dropFirst', 'dropLast', 'dump', 'enumerate', 'equal',
-                'extend', 'fatalError', 'filter', 'find', 'first', 'getVaList', 'indices',
-                'insert', 'isEmpty', 'join', 'last', 'lazy', 'lexicographicalCompare',
-                'map', 'max', 'maxElement', 'min', 'minElement', 'numericCast', 'overlaps',
-                'partition', 'precondition', 'preconditionFailure', 'prefix', 'print',
-                'println', 'reduce', 'reflect', 'removeAll', 'removeAtIndex', 'removeLast',
-                'removeRange', 'reverse', 'sizeof', 'sizeofValue', 'sort', 'sorted',
-                'splice', 'split', 'startsWith', 'stride', 'strideof', 'strideofValue',
-                'suffix', 'swap', 'toDebugString', 'toString', 'transcode',
-                'underestimateCount', 'unsafeAddressOf', 'unsafeBitCast', 'unsafeDowncast',
-                'withExtendedLifetime', 'withUnsafeMutablePointer',
-                'withUnsafeMutablePointers', 'withUnsafePointer', 'withUnsafePointers',
-                'withVaList'), suffix=r'\b'),
-             Name.Builtin.Pseudo),
-
-            # Implicit Block Variables
-            (r'\$\d+', Name.Variable),
-
-            # Binary Literal
-            (r'0b[01_]+', Number.Bin),
-            # Octal Literal
-            (r'0o[0-7_]+', Number.Oct),
-            # Hexadecimal Literal
-            (r'0x[0-9a-fA-F_]+', Number.Hex),
-            # Decimal Literal
-            (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
-             r'\.[0-9_]*|[eE][+\-]?[0-9_]+)', Number.Float),
-            (r'[0-9][0-9_]*', Number.Integer),
-            # String Literal
-            (r'"', String, 'string'),
-
-            # Operators and Punctuation
-            (r'[(){}\[\].,:;=@#`?]|->|[<&?](?=\w)|(?<=\w)[>!?]', Punctuation),
-            (r'[/=\-+!*%<>&|^?~]+', Operator),
-
-            # Identifier
-            (r'[a-zA-Z_]\w*', Name)
-        ],
-        'keywords': [
-            (words((
-                'as', 'async', 'await', 'break', 'case', 'catch', 'continue', 'default', 'defer',
-                'do', 'else', 'fallthrough', 'for', 'guard', 'if', 'in', 'is',
-                'repeat', 'return', '#selector', 'switch', 'throw', 'try',
-                'where', 'while'), suffix=r'\b'),
-             Keyword),
-            (r'@availability\([^)]+\)', Keyword.Reserved),
-            (words((
-                'associativity', 'convenience', 'dynamic', 'didSet', 'final',
-                'get', 'indirect', 'infix', 'inout', 'lazy', 'left', 'mutating',
-                'none', 'nonmutating', 'optional', 'override', 'postfix',
-                'precedence', 'prefix', 'Protocol', 'required', 'rethrows',
-                'right', 'set', 'throws', 'Type', 'unowned', 'weak', 'willSet',
-                '@availability', '@autoclosure', '@noreturn',
-                '@NSApplicationMain', '@NSCopying', '@NSManaged', '@objc',
-                '@UIApplicationMain', '@IBAction', '@IBDesignable',
-                '@IBInspectable', '@IBOutlet'), suffix=r'\b'),
-             Keyword.Reserved),
-            (r'(as|dynamicType|false|is|nil|self|Self|super|true|__COLUMN__'
-             r'|__FILE__|__FUNCTION__|__LINE__|_'
-             r'|#(?:file|line|column|function))\b', Keyword.Constant),
-            (r'import\b', Keyword.Declaration, 'module'),
-            (r'(class|enum|extension|struct|protocol)(\s+)([a-zA-Z_]\w*)',
-             bygroups(Keyword.Declaration, Text, Name.Class)),
-            (r'(func)(\s+)([a-zA-Z_]\w*)',
-             bygroups(Keyword.Declaration, Text, Name.Function)),
-            (r'(var|let)(\s+)([a-zA-Z_]\w*)', bygroups(Keyword.Declaration,
-             Text, Name.Variable)),
-            (words((
-                'actor', 'associatedtype', 'class', 'deinit', 'enum', 'extension', 'func', 'import',
-                'init', 'internal', 'let', 'operator', 'private', 'protocol', 'public',
-                'static', 'struct', 'subscript', 'typealias', 'var'), suffix=r'\b'),
-             Keyword.Declaration)
-        ],
-        'comment': [
-            (r':param: [a-zA-Z_]\w*|:returns?:|(FIXME|MARK|TODO):',
-             Comment.Special)
-        ],
-
-        # Nested
-        'comment-single': [
-            (r'\n', Text, '#pop'),
-            include('comment'),
-            (r'[^\n]', Comment.Single)
-        ],
-        'comment-multi': [
-            include('comment'),
-            (r'[^*/]', Comment.Multiline),
-            (r'/\*', Comment.Multiline, '#push'),
-            (r'\*/', Comment.Multiline, '#pop'),
-            (r'[*/]', Comment.Multiline)
-        ],
-        'module': [
-            (r'\n', Text, '#pop'),
-            (r'[a-zA-Z_]\w*', Name.Class),
-            include('root')
-        ],
-        'preproc': [
-            (r'\n', Text, '#pop'),
-            include('keywords'),
-            (r'[A-Za-z]\w*', Comment.Preproc),
-            include('root')
-        ],
-        'string': [
-            (r'\\\(', String.Interpol, 'string-intp'),
-            (r'"', String, '#pop'),
-            (r"""\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\[0-7]{1,3}"""
-             r"""|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}""", String.Escape),
-            (r'[^\\"]+', String),
-            (r'\\', String)
-        ],
-        'string-intp': [
-            (r'\(', String.Interpol, '#push'),
-            (r'\)', String.Interpol, '#pop'),
-            include('root')
-        ]
-    }
-
-    def get_tokens_unprocessed(self, text):
-        from pygments.lexers._cocoa_builtins import COCOA_INTERFACES, \
-            COCOA_PROTOCOLS, COCOA_PRIMITIVES
-
-        for index, token, value in \
-                RegexLexer.get_tokens_unprocessed(self, text):
-            if token is Name or token is Name.Class:
-                if value in COCOA_INTERFACES or value in COCOA_PROTOCOLS \
-                   or value in COCOA_PRIMITIVES:
-                    token = Name.Builtin.Pseudo
-
-            yield index, token, value
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/ooc.py b/venv/lib/python3.11/site-packages/pygments/lexers/ooc.py
deleted file mode 100644
index c4600ea..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/ooc.py
+++ /dev/null
@@ -1,85 +0,0 @@
-"""
-    pygments.lexers.ooc
-    ~~~~~~~~~~~~~~~~~~~
-
-    Lexers for the Ooc language.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation
-
-__all__ = ['OocLexer']
-
-
-class OocLexer(RegexLexer):
-    """
-    For Ooc source code
-
-    .. versionadded:: 1.2
-    """
-    name = 'Ooc'
-    url = 'http://ooc-lang.org/'
-    aliases = ['ooc']
-    filenames = ['*.ooc']
-    mimetypes = ['text/x-ooc']
-
-    tokens = {
-        'root': [
-            (words((
-                'class', 'interface', 'implement', 'abstract', 'extends', 'from',
-                'this', 'super', 'new', 'const', 'final', 'static', 'import',
-                'use', 'extern', 'inline', 'proto', 'break', 'continue',
-                'fallthrough', 'operator', 'if', 'else', 'for', 'while', 'do',
-                'switch', 'case', 'as', 'in', 'version', 'return', 'true',
-                'false', 'null'), prefix=r'\b', suffix=r'\b'),
-             Keyword),
-            (r'include\b', Keyword, 'include'),
-            (r'(cover)([ \t]+)(from)([ \t]+)(\w+[*@]?)',
-             bygroups(Keyword, Text, Keyword, Text, Name.Class)),
-            (r'(func)((?:[ \t]|\\\n)+)(~[a-z_]\w*)',
-             bygroups(Keyword, Text, Name.Function)),
-            (r'\bfunc\b', Keyword),
-            # Note: %= and ^= not listed on http://ooc-lang.org/syntax
-            (r'//.*', Comment),
-            (r'(?s)/\*.*?\*/', Comment.Multiline),
-            (r'(==?|\+=?|-[=>]?|\*=?|/=?|:=|!=?|%=?|\?|>{1,3}=?|<{1,3}=?|\.\.|'
-             r'&&?|\|\|?|\^=?)', Operator),
-            (r'(\.)([ \t]*)([a-z]\w*)', bygroups(Operator, Text,
-                                                 Name.Function)),
-            (r'[A-Z][A-Z0-9_]+', Name.Constant),
-            (r'[A-Z]\w*([@*]|\[[ \t]*\])?', Name.Class),
-
-            (r'([a-z]\w*(?:~[a-z]\w*)?)((?:[ \t]|\\\n)*)(?=\()',
-             bygroups(Name.Function, Text)),
-            (r'[a-z]\w*', Name.Variable),
-
-            # : introduces types
-            (r'[:(){}\[\];,]', Punctuation),
-
-            (r'0x[0-9a-fA-F]+', Number.Hex),
-            (r'0c[0-9]+', Number.Oct),
-            (r'0b[01]+', Number.Bin),
-            (r'[0-9_]\.[0-9_]*(?!\.)', Number.Float),
-            (r'[0-9_]+', Number.Decimal),
-
-            (r'"(?:\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\"])*"',
-             String.Double),
-            (r"'(?:\\.|\\[0-9]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'",
-             String.Char),
-            (r'@', Punctuation),  # pointer dereference
-            (r'\.', Punctuation),  # imports or chain operator
-
-            (r'\\[ \t\n]', Text),
-            (r'[ \t]+', Text),
-        ],
-        'include': [
-            (r'[\w/]+', Name),
-            (r',', Punctuation),
-            (r'[ \t]', Text),
-            (r'[;\n]', Text, '#pop'),
-        ],
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/openscad.py b/venv/lib/python3.11/site-packages/pygments/lexers/openscad.py
deleted file mode 100644
index de8fdaf..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/openscad.py
+++ /dev/null
@@ -1,97 +0,0 @@
-"""
-    pygments.lexers.openscad
-    ~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for the OpenSCAD languages.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, words, include
-from pygments.token import Text, Comment, Punctuation, Operator, Keyword, Name, Number, Whitespace, Literal, String
-
-__all__ = ['OpenScadLexer']
-
-
-class OpenScadLexer(RegexLexer):
-    """For openSCAD code.
-
-    .. versionadded:: 2.16
-    """
-    name = "OpenSCAD"
-    url = "https://openscad.org/"
-    aliases = ["openscad"]
-    filenames = ["*.scad"]
-    mimetypes = ["application/x-openscad"]
-
-    tokens = {
-        "root": [
-            (r"[^\S\n]+", Whitespace),
-            (r'//', Comment.Single, 'comment-single'),
-            (r'/\*', Comment.Multiline, 'comment-multi'),
-            (r"[{}\[\]\(\),;:]", Punctuation),
-            (r"[*!#%\-+=?/]", Operator),
-            (r"<=|<|==|!=|>=|>|&&|\|\|", Operator),
-            (r"\$(f[asn]|t|vp[rtd]|children)", Operator),
-            (r"(undef|PI)\b", Keyword.Constant),
-            (
-                r"(use|include)((?:\s|\\\\s)+)",
-                bygroups(Keyword.Namespace, Text),
-                "includes",
-            ),
-            (r"(module)(\s*)([^\s\(]+)",
-             bygroups(Keyword.Namespace, Whitespace, Name.Namespace)),
-            (r"(function)(\s*)([^\s\(]+)",
-             bygroups(Keyword.Declaration, Whitespace, Name.Function)),
-            (words(("true", "false"), prefix=r"\b", suffix=r"\b"), Literal),
-            (words((
-                "function", "module", "include", "use", "for",
-                "intersection_for", "if", "else", "return"
-                ), prefix=r"\b", suffix=r"\b"), Keyword
-            ),
-            (words((
-                "circle", "square", "polygon", "text", "sphere", "cube",
-                "cylinder", "polyhedron", "translate", "rotate", "scale",
-                "resize", "mirror", "multmatrix", "color", "offset", "hull",
-                "minkowski", "union", "difference", "intersection", "abs",
-                "sign", "sin", "cos", "tan", "acos", "asin", "atan", "atan2",
-                "floor", "round", "ceil", "ln", "log", "pow", "sqrt", "exp",
-                "rands", "min", "max", "concat", "lookup", "str", "chr",
-                "search", "version", "version_num", "norm", "cross",
-                "parent_module", "echo", "import", "import_dxf",
-                "dxf_linear_extrude", "linear_extrude", "rotate_extrude",
-                "surface", "projection", "render", "dxf_cross",
-                "dxf_dim", "let", "assign", "len"
-                ), prefix=r"\b", suffix=r"\b"),
-                Name.Builtin
-            ),
-            (r"\bchildren\b", Name.Builtin.Pseudo),
-            (r'""".*?"""', String.Double),
-            (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
-            (r"-?\d+(\.\d+)?(e[+-]?\d+)?", Number),
-            (r"\w+", Name),
-        ],
-        "includes": [
-            (
-                r"(<)([^>]*)(>)",
-                bygroups(Punctuation, Comment.PreprocFile, Punctuation),
-            ),
-        ],
-        'comment': [
-            (r':param: [a-zA-Z_]\w*|:returns?:|(FIXME|MARK|TODO):',
-             Comment.Special)
-        ],
-        'comment-single': [
-            (r'\n', Text, '#pop'),
-            include('comment'),
-            (r'[^\n]+', Comment.Single)
-        ],
-        'comment-multi': [
-            include('comment'),
-            (r'[^*/]+', Comment.Multiline),
-            (r'/\*', Comment.Multiline, '#push'),
-            (r'\*/', Comment.Multiline, '#pop'),
-            (r'[*/]', Comment.Multiline)
-        ],
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/other.py b/venv/lib/python3.11/site-packages/pygments/lexers/other.py
deleted file mode 100644
index f2c07d7..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/other.py
+++ /dev/null
@@ -1,40 +0,0 @@
-"""
-    pygments.lexers.other
-    ~~~~~~~~~~~~~~~~~~~~~
-
-    Just export lexer classes previously contained in this module.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexers.sql import SqlLexer, MySqlLexer, SqliteConsoleLexer
-from pygments.lexers.shell import BashLexer, BashSessionLexer, BatchLexer, \
-    TcshLexer
-from pygments.lexers.robotframework import RobotFrameworkLexer
-from pygments.lexers.testing import GherkinLexer
-from pygments.lexers.esoteric import BrainfuckLexer, BefungeLexer, RedcodeLexer
-from pygments.lexers.prolog import LogtalkLexer
-from pygments.lexers.snobol import SnobolLexer
-from pygments.lexers.rebol import RebolLexer
-from pygments.lexers.configs import KconfigLexer, Cfengine3Lexer
-from pygments.lexers.modeling import ModelicaLexer
-from pygments.lexers.scripting import AppleScriptLexer, MOOCodeLexer, \
-    HybrisLexer
-from pygments.lexers.graphics import PostScriptLexer, GnuplotLexer, \
-    AsymptoteLexer, PovrayLexer
-from pygments.lexers.business import ABAPLexer, OpenEdgeLexer, \
-    GoodDataCLLexer, MaqlLexer
-from pygments.lexers.automation import AutoItLexer, AutohotkeyLexer
-from pygments.lexers.dsls import ProtoBufLexer, BroLexer, PuppetLexer, \
-    MscgenLexer, VGLLexer
-from pygments.lexers.basic import CbmBasicV2Lexer
-from pygments.lexers.pawn import SourcePawnLexer, PawnLexer
-from pygments.lexers.ecl import ECLLexer
-from pygments.lexers.urbi import UrbiscriptLexer
-from pygments.lexers.smalltalk import SmalltalkLexer, NewspeakLexer
-from pygments.lexers.installers import NSISLexer, RPMSpecLexer
-from pygments.lexers.textedit import AwkLexer
-from pygments.lexers.smv import NuSMVLexer
-
-__all__ = []
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/parasail.py b/venv/lib/python3.11/site-packages/pygments/lexers/parasail.py
deleted file mode 100644
index 5a7238e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/parasail.py
+++ /dev/null
@@ -1,79 +0,0 @@
-"""
-    pygments.lexers.parasail
-    ~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexer for ParaSail.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation, Literal
-
-__all__ = ['ParaSailLexer']
-
-
-class ParaSailLexer(RegexLexer):
-    """
-    For ParaSail source code.
-
-    .. versionadded:: 2.1
-    """
-
-    name = 'ParaSail'
-    url = 'http://www.parasail-lang.org'
-    aliases = ['parasail']
-    filenames = ['*.psi', '*.psl']
-    mimetypes = ['text/x-parasail']
-
-    flags = re.MULTILINE
-
-    tokens = {
-        'root': [
-            (r'[^\S\n]+', Text),
-            (r'//.*?\n', Comment.Single),
-            (r'\b(and|or|xor)=', Operator.Word),
-            (r'\b(and(\s+then)?|or(\s+else)?|xor|rem|mod|'
-             r'(is|not)\s+null)\b',
-             Operator.Word),
-            # Keywords
-            (r'\b(abs|abstract|all|block|class|concurrent|const|continue|'
-             r'each|end|exit|extends|exports|forward|func|global|implements|'
-             r'import|in|interface|is|lambda|locked|new|not|null|of|op|'
-             r'optional|private|queued|ref|return|reverse|separate|some|'
-             r'type|until|var|with|'
-             # Control flow
-             r'if|then|else|elsif|case|for|while|loop)\b',
-             Keyword.Reserved),
-            (r'(abstract\s+)?(interface|class|op|func|type)',
-             Keyword.Declaration),
-            # Literals
-            (r'"[^"]*"', String),
-            (r'\\[\'ntrf"0]', String.Escape),
-            (r'#[a-zA-Z]\w*', Literal),       # Enumeration
-            include('numbers'),
-            (r"'[^']'", String.Char),
-            (r'[a-zA-Z]\w*', Name),
-            # Operators and Punctuation
-            (r'(<==|==>|<=>|\*\*=|<\|=|<<=|>>=|==|!=|=\?|<=|>=|'
-             r'\*\*|<<|>>|=>|:=|\+=|-=|\*=|\|=|\||/=|\+|-|\*|/|'
-             r'\.\.|<\.\.|\.\.<|<\.\.<)',
-             Operator),
-            (r'(<|>|\[|\]|\(|\)|\||:|;|,|.|\{|\}|->)',
-             Punctuation),
-            (r'\n+', Text),
-        ],
-        'numbers': [
-            (r'\d[0-9_]*#[0-9a-fA-F][0-9a-fA-F_]*#', Number.Hex),  # any base
-            (r'0[xX][0-9a-fA-F][0-9a-fA-F_]*', Number.Hex),        # C-like hex
-            (r'0[bB][01][01_]*', Number.Bin),                      # C-like bin
-            (r'\d[0-9_]*\.\d[0-9_]*[eE][+-]\d[0-9_]*',             # float exp
-             Number.Float),
-            (r'\d[0-9_]*\.\d[0-9_]*', Number.Float),               # float
-            (r'\d[0-9_]*', Number.Integer),                        # integer
-        ],
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/parsers.py b/venv/lib/python3.11/site-packages/pygments/lexers/parsers.py
deleted file mode 100644
index 0415ac6..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/parsers.py
+++ /dev/null
@@ -1,801 +0,0 @@
-"""
-    pygments.lexers.parsers
-    ~~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for parser generators.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, DelegatingLexer, \
-    include, bygroups, using
-from pygments.token import Punctuation, Other, Text, Comment, Operator, \
-    Keyword, Name, String, Number, Whitespace
-from pygments.lexers.jvm import JavaLexer
-from pygments.lexers.c_cpp import CLexer, CppLexer
-from pygments.lexers.objective import ObjectiveCLexer
-from pygments.lexers.d import DLexer
-from pygments.lexers.dotnet import CSharpLexer
-from pygments.lexers.ruby import RubyLexer
-from pygments.lexers.python import PythonLexer
-from pygments.lexers.perl import PerlLexer
-
-__all__ = ['RagelLexer', 'RagelEmbeddedLexer', 'RagelCLexer', 'RagelDLexer',
-           'RagelCppLexer', 'RagelObjectiveCLexer', 'RagelRubyLexer',
-           'RagelJavaLexer', 'AntlrLexer', 'AntlrPythonLexer',
-           'AntlrPerlLexer', 'AntlrRubyLexer', 'AntlrCppLexer',
-           'AntlrCSharpLexer', 'AntlrObjectiveCLexer',
-           'AntlrJavaLexer', 'AntlrActionScriptLexer',
-           'TreetopLexer', 'EbnfLexer']
-
-
-class RagelLexer(RegexLexer):
-    """A pure `Ragel `_ lexer.  Use this
-    for fragments of Ragel.  For ``.rl`` files, use
-    :class:`RagelEmbeddedLexer` instead (or one of the
-    language-specific subclasses).
-
-    .. versionadded:: 1.1
-
-    """
-
-    name = 'Ragel'
-    url = 'http://www.colm.net/open-source/ragel/'
-    aliases = ['ragel']
-    filenames = []
-
-    tokens = {
-        'whitespace': [
-            (r'\s+', Whitespace)
-        ],
-        'comments': [
-            (r'\#.*$', Comment),
-        ],
-        'keywords': [
-            (r'(access|action|alphtype)\b', Keyword),
-            (r'(getkey|write|machine|include)\b', Keyword),
-            (r'(any|ascii|extend|alpha|digit|alnum|lower|upper)\b', Keyword),
-            (r'(xdigit|cntrl|graph|print|punct|space|zlen|empty)\b', Keyword)
-        ],
-        'numbers': [
-            (r'0x[0-9A-Fa-f]+', Number.Hex),
-            (r'[+-]?[0-9]+', Number.Integer),
-        ],
-        'literals': [
-            (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
-            (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
-            (r'\[(\\\\|\\[^\\]|[^\\\]])*\]', String),          # square bracket literals
-            (r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/', String.Regex),  # regular expressions
-        ],
-        'identifiers': [
-            (r'[a-zA-Z_]\w*', Name.Variable),
-        ],
-        'operators': [
-            (r',', Operator),                           # Join
-            (r'\||&|--?', Operator),                    # Union, Intersection and Subtraction
-            (r'\.|<:|:>>?', Operator),                  # Concatention
-            (r':', Operator),                           # Label
-            (r'->', Operator),                          # Epsilon Transition
-            (r'(>|\$|%|<|@|<>)(/|eof\b)', Operator),    # EOF Actions
-            (r'(>|\$|%|<|@|<>)(!|err\b)', Operator),    # Global Error Actions
-            (r'(>|\$|%|<|@|<>)(\^|lerr\b)', Operator),  # Local Error Actions
-            (r'(>|\$|%|<|@|<>)(~|to\b)', Operator),     # To-State Actions
-            (r'(>|\$|%|<|@|<>)(\*|from\b)', Operator),  # From-State Actions
-            (r'>|@|\$|%', Operator),                    # Transition Actions and Priorities
-            (r'\*|\?|\+|\{[0-9]*,[0-9]*\}', Operator),  # Repetition
-            (r'!|\^', Operator),                        # Negation
-            (r'\(|\)', Operator),                       # Grouping
-        ],
-        'root': [
-            include('literals'),
-            include('whitespace'),
-            include('comments'),
-            include('keywords'),
-            include('numbers'),
-            include('identifiers'),
-            include('operators'),
-            (r'\{', Punctuation, 'host'),
-            (r'=', Operator),
-            (r';', Punctuation),
-        ],
-        'host': [
-            (r'(' + r'|'.join((  # keep host code in largest possible chunks
-                r'[^{}\'"/#]+',  # exclude unsafe characters
-                r'[^\\]\\[{}]',  # allow escaped { or }
-
-                # strings and comments may safely contain unsafe characters
-                r'"(\\\\|\\[^\\]|[^"\\])*"',
-                r"'(\\\\|\\[^\\]|[^'\\])*'",
-                r'//.*$\n?',            # single line comment
-                r'/\*(.|\n)*?\*/',      # multi-line javadoc-style comment
-                r'\#.*$\n?',            # ruby comment
-
-                # regular expression: There's no reason for it to start
-                # with a * and this stops confusion with comments.
-                r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/',
-
-                # / is safe now that we've handled regex and javadoc comments
-                r'/',
-            )) + r')+', Other),
-
-            (r'\{', Punctuation, '#push'),
-            (r'\}', Punctuation, '#pop'),
-        ],
-    }
-
-
-class RagelEmbeddedLexer(RegexLexer):
-    """
-    A lexer for Ragel embedded in a host language file.
-
-    This will only highlight Ragel statements. If you want host language
-    highlighting then call the language-specific Ragel lexer.
-
-    .. versionadded:: 1.1
-    """
-
-    name = 'Embedded Ragel'
-    aliases = ['ragel-em']
-    filenames = ['*.rl']
-
-    tokens = {
-        'root': [
-            (r'(' + r'|'.join((   # keep host code in largest possible chunks
-                r'[^%\'"/#]+',    # exclude unsafe characters
-                r'%(?=[^%]|$)',   # a single % sign is okay, just not 2 of them
-
-                # strings and comments may safely contain unsafe characters
-                r'"(\\\\|\\[^\\]|[^"\\])*"',
-                r"'(\\\\|\\[^\\]|[^'\\])*'",
-                r'/\*(.|\n)*?\*/',      # multi-line javadoc-style comment
-                r'//.*$\n?',  # single line comment
-                r'\#.*$\n?',  # ruby/ragel comment
-                r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/',  # regular expression
-
-                # / is safe now that we've handled regex and javadoc comments
-                r'/',
-            )) + r')+', Other),
-
-            # Single Line FSM.
-            # Please don't put a quoted newline in a single line FSM.
-            # That's just mean. It will break this.
-            (r'(%%)(?![{%])(.*)($|;)(\n?)', bygroups(Punctuation,
-                                                     using(RagelLexer),
-                                                     Punctuation, Text)),
-
-            # Multi Line FSM.
-            (r'(%%%%|%%)\{', Punctuation, 'multi-line-fsm'),
-        ],
-        'multi-line-fsm': [
-            (r'(' + r'|'.join((  # keep ragel code in largest possible chunks.
-                r'(' + r'|'.join((
-                    r'[^}\'"\[/#]',   # exclude unsafe characters
-                    r'\}(?=[^%]|$)',   # } is okay as long as it's not followed by %
-                    r'\}%(?=[^%]|$)',  # ...well, one %'s okay, just not two...
-                    r'[^\\]\\[{}]',   # ...and } is okay if it's escaped
-
-                    # allow / if it's preceded with one of these symbols
-                    # (ragel EOF actions)
-                    r'(>|\$|%|<|@|<>)/',
-
-                    # specifically allow regex followed immediately by *
-                    # so it doesn't get mistaken for a comment
-                    r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/\*',
-
-                    # allow / as long as it's not followed by another / or by a *
-                    r'/(?=[^/*]|$)',
-
-                    # We want to match as many of these as we can in one block.
-                    # Not sure if we need the + sign here,
-                    # does it help performance?
-                )) + r')+',
-
-                # strings and comments may safely contain unsafe characters
-                r'"(\\\\|\\[^\\]|[^"\\])*"',
-                r"'(\\\\|\\[^\\]|[^'\\])*'",
-                r"\[(\\\\|\\[^\\]|[^\]\\])*\]",  # square bracket literal
-                r'/\*(.|\n)*?\*/',          # multi-line javadoc-style comment
-                r'//.*$\n?',                # single line comment
-                r'\#.*$\n?',                # ruby/ragel comment
-            )) + r')+', using(RagelLexer)),
-
-            (r'\}%%', Punctuation, '#pop'),
-        ]
-    }
-
-    def analyse_text(text):
-        return '@LANG: indep' in text
-
-
-class RagelRubyLexer(DelegatingLexer):
-    """
-    A lexer for Ragel in a Ruby host file.
-
-    .. versionadded:: 1.1
-    """
-
-    name = 'Ragel in Ruby Host'
-    aliases = ['ragel-ruby', 'ragel-rb']
-    filenames = ['*.rl']
-
-    def __init__(self, **options):
-        super().__init__(RubyLexer, RagelEmbeddedLexer, **options)
-
-    def analyse_text(text):
-        return '@LANG: ruby' in text
-
-
-class RagelCLexer(DelegatingLexer):
-    """
-    A lexer for Ragel in a C host file.
-
-    .. versionadded:: 1.1
-    """
-
-    name = 'Ragel in C Host'
-    aliases = ['ragel-c']
-    filenames = ['*.rl']
-
-    def __init__(self, **options):
-        super().__init__(CLexer, RagelEmbeddedLexer, **options)
-
-    def analyse_text(text):
-        return '@LANG: c' in text
-
-
-class RagelDLexer(DelegatingLexer):
-    """
-    A lexer for Ragel in a D host file.
-
-    .. versionadded:: 1.1
-    """
-
-    name = 'Ragel in D Host'
-    aliases = ['ragel-d']
-    filenames = ['*.rl']
-
-    def __init__(self, **options):
-        super().__init__(DLexer, RagelEmbeddedLexer, **options)
-
-    def analyse_text(text):
-        return '@LANG: d' in text
-
-
-class RagelCppLexer(DelegatingLexer):
-    """
-    A lexer for Ragel in a C++ host file.
-
-    .. versionadded:: 1.1
-    """
-
-    name = 'Ragel in CPP Host'
-    aliases = ['ragel-cpp']
-    filenames = ['*.rl']
-
-    def __init__(self, **options):
-        super().__init__(CppLexer, RagelEmbeddedLexer, **options)
-
-    def analyse_text(text):
-        return '@LANG: c++' in text
-
-
-class RagelObjectiveCLexer(DelegatingLexer):
-    """
-    A lexer for Ragel in an Objective C host file.
-
-    .. versionadded:: 1.1
-    """
-
-    name = 'Ragel in Objective C Host'
-    aliases = ['ragel-objc']
-    filenames = ['*.rl']
-
-    def __init__(self, **options):
-        super().__init__(ObjectiveCLexer, RagelEmbeddedLexer, **options)
-
-    def analyse_text(text):
-        return '@LANG: objc' in text
-
-
-class RagelJavaLexer(DelegatingLexer):
-    """
-    A lexer for Ragel in a Java host file.
-
-    .. versionadded:: 1.1
-    """
-
-    name = 'Ragel in Java Host'
-    aliases = ['ragel-java']
-    filenames = ['*.rl']
-
-    def __init__(self, **options):
-        super().__init__(JavaLexer, RagelEmbeddedLexer, **options)
-
-    def analyse_text(text):
-        return '@LANG: java' in text
-
-
-class AntlrLexer(RegexLexer):
-    """
-    Generic `ANTLR`_ Lexer.
-    Should not be called directly, instead
-    use DelegatingLexer for your target language.
-
-    .. versionadded:: 1.1
-
-    .. _ANTLR: http://www.antlr.org/
-    """
-
-    name = 'ANTLR'
-    aliases = ['antlr']
-    filenames = []
-
-    _id = r'[A-Za-z]\w*'
-    _TOKEN_REF = r'[A-Z]\w*'
-    _RULE_REF = r'[a-z]\w*'
-    _STRING_LITERAL = r'\'(?:\\\\|\\\'|[^\']*)\''
-    _INT = r'[0-9]+'
-
-    tokens = {
-        'whitespace': [
-            (r'\s+', Whitespace),
-        ],
-        'comments': [
-            (r'//.*$', Comment),
-            (r'/\*(.|\n)*?\*/', Comment),
-        ],
-        'root': [
-            include('whitespace'),
-            include('comments'),
-
-            (r'(lexer|parser|tree)?(\s*)(grammar\b)(\s*)(' + _id + ')(;)',
-             bygroups(Keyword, Whitespace, Keyword, Whitespace, Name.Class,
-                      Punctuation)),
-            # optionsSpec
-            (r'options\b', Keyword, 'options'),
-            # tokensSpec
-            (r'tokens\b', Keyword, 'tokens'),
-            # attrScope
-            (r'(scope)(\s*)(' + _id + r')(\s*)(\{)',
-             bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
-                      Punctuation), 'action'),
-            # exception
-            (r'(catch|finally)\b', Keyword, 'exception'),
-            # action
-            (r'(@' + _id + r')(\s*)(::)?(\s*)(' + _id + r')(\s*)(\{)',
-             bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
-                      Name.Label, Whitespace, Punctuation), 'action'),
-            # rule
-            (r'((?:protected|private|public|fragment)\b)?(\s*)(' + _id + ')(!)?',
-             bygroups(Keyword, Whitespace, Name.Label, Punctuation),
-             ('rule-alts', 'rule-prelims')),
-        ],
-        'exception': [
-            (r'\n', Whitespace, '#pop'),
-            (r'\s', Whitespace),
-            include('comments'),
-
-            (r'\[', Punctuation, 'nested-arg-action'),
-            (r'\{', Punctuation, 'action'),
-        ],
-        'rule-prelims': [
-            include('whitespace'),
-            include('comments'),
-
-            (r'returns\b', Keyword),
-            (r'\[', Punctuation, 'nested-arg-action'),
-            (r'\{', Punctuation, 'action'),
-            # throwsSpec
-            (r'(throws)(\s+)(' + _id + ')',
-             bygroups(Keyword, Whitespace, Name.Label)),
-            (r'(,)(\s*)(' + _id + ')',
-             bygroups(Punctuation, Whitespace, Name.Label)),  # Additional throws
-            # optionsSpec
-            (r'options\b', Keyword, 'options'),
-            # ruleScopeSpec - scope followed by target language code or name of action
-            # TODO finish implementing other possibilities for scope
-            # L173 ANTLRv3.g from ANTLR book
-            (r'(scope)(\s+)(\{)', bygroups(Keyword, Whitespace, Punctuation),
-             'action'),
-            (r'(scope)(\s+)(' + _id + r')(\s*)(;)',
-             bygroups(Keyword, Whitespace, Name.Label, Whitespace, Punctuation)),
-            # ruleAction
-            (r'(@' + _id + r')(\s*)(\{)',
-             bygroups(Name.Label, Whitespace, Punctuation), 'action'),
-            # finished prelims, go to rule alts!
-            (r':', Punctuation, '#pop')
-        ],
-        'rule-alts': [
-            include('whitespace'),
-            include('comments'),
-
-            # These might need to go in a separate 'block' state triggered by (
-            (r'options\b', Keyword, 'options'),
-            (r':', Punctuation),
-
-            # literals
-            (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
-            (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
-            (r'<<([^>]|>[^>])>>', String),
-            # identifiers
-            # Tokens start with capital letter.
-            (r'\$?[A-Z_]\w*', Name.Constant),
-            # Rules start with small letter.
-            (r'\$?[a-z_]\w*', Name.Variable),
-            # operators
-            (r'(\+|\||->|=>|=|\(|\)|\.\.|\.|\?|\*|\^|!|\#|~)', Operator),
-            (r',', Punctuation),
-            (r'\[', Punctuation, 'nested-arg-action'),
-            (r'\{', Punctuation, 'action'),
-            (r';', Punctuation, '#pop')
-        ],
-        'tokens': [
-            include('whitespace'),
-            include('comments'),
-            (r'\{', Punctuation),
-            (r'(' + _TOKEN_REF + r')(\s*)(=)?(\s*)(' + _STRING_LITERAL
-             + r')?(\s*)(;)',
-             bygroups(Name.Label, Whitespace, Punctuation, Whitespace,
-                      String, Whitespace, Punctuation)),
-            (r'\}', Punctuation, '#pop'),
-        ],
-        'options': [
-            include('whitespace'),
-            include('comments'),
-            (r'\{', Punctuation),
-            (r'(' + _id + r')(\s*)(=)(\s*)(' +
-             '|'.join((_id, _STRING_LITERAL, _INT, r'\*')) + r')(\s*)(;)',
-             bygroups(Name.Variable, Whitespace, Punctuation, Whitespace,
-                      Text, Whitespace, Punctuation)),
-            (r'\}', Punctuation, '#pop'),
-        ],
-        'action': [
-            (r'(' + r'|'.join((    # keep host code in largest possible chunks
-                r'[^${}\'"/\\]+',  # exclude unsafe characters
-
-                # strings and comments may safely contain unsafe characters
-                r'"(\\\\|\\[^\\]|[^"\\])*"',
-                r"'(\\\\|\\[^\\]|[^'\\])*'",
-                r'//.*$\n?',            # single line comment
-                r'/\*(.|\n)*?\*/',      # multi-line javadoc-style comment
-
-                # regular expression: There's no reason for it to start
-                # with a * and this stops confusion with comments.
-                r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/',
-
-                # backslashes are okay, as long as we are not backslashing a %
-                r'\\(?!%)',
-
-                # Now that we've handled regex and javadoc comments
-                # it's safe to let / through.
-                r'/',
-            )) + r')+', Other),
-            (r'(\\)(%)', bygroups(Punctuation, Other)),
-            (r'(\$[a-zA-Z]+)(\.?)(text|value)?',
-             bygroups(Name.Variable, Punctuation, Name.Property)),
-            (r'\{', Punctuation, '#push'),
-            (r'\}', Punctuation, '#pop'),
-        ],
-        'nested-arg-action': [
-            (r'(' + r'|'.join((    # keep host code in largest possible chunks.
-                r'[^$\[\]\'"/]+',  # exclude unsafe characters
-
-                # strings and comments may safely contain unsafe characters
-                r'"(\\\\|\\[^\\]|[^"\\])*"',
-                r"'(\\\\|\\[^\\]|[^'\\])*'",
-                r'//.*$\n?',            # single line comment
-                r'/\*(.|\n)*?\*/',      # multi-line javadoc-style comment
-
-                # regular expression: There's no reason for it to start
-                # with a * and this stops confusion with comments.
-                r'/(?!\*)(\\\\|\\[^\\]|[^/\\])*/',
-
-                # Now that we've handled regex and javadoc comments
-                # it's safe to let / through.
-                r'/',
-            )) + r')+', Other),
-
-
-            (r'\[', Punctuation, '#push'),
-            (r'\]', Punctuation, '#pop'),
-            (r'(\$[a-zA-Z]+)(\.?)(text|value)?',
-             bygroups(Name.Variable, Punctuation, Name.Property)),
-            (r'(\\\\|\\\]|\\\[|[^\[\]])+', Other),
-        ]
-    }
-
    def analyse_text(text):
        # Recognize an ANTLR grammar by a "grammar Name;" declaration at the
        # start of some line (re.M).  NOTE: pygments convention -- no `self`;
        # the framework calls analyse_text statically and interprets the
        # returned match object / None as a truthy/falsy confidence.
        return re.search(r'^\s*grammar\s+[a-zA-Z0-9]+\s*;', text, re.M)
-
-
-# http://www.antlr.org/wiki/display/ANTLR3/Code+Generation+Targets
-
class AntlrCppLexer(DelegatingLexer):
    """
    ANTLR grammars whose embedded actions are written for the C/C++ target.

    .. versionadded:: 1.1
    """

    name = 'ANTLR With CPP Target'
    aliases = ['antlr-cpp']
    filenames = ['*.G', '*.g']

    def __init__(self, **options):
        # Host code is lexed by CppLexer; the grammar notation itself by
        # AntlrLexer.
        super().__init__(CppLexer, AntlrLexer, **options)

    def analyse_text(text):
        # Claim the file only when the generic ANTLR heuristic fires and the
        # grammar explicitly selects this target.
        # NOTE(review): the option matched is "language = C;", not "Cpp" --
        # confirm that is intended for the C++ target lexer.
        base = AntlrLexer.analyse_text(text)
        if not base:
            return base
        return re.search(r'^\s*language\s*=\s*C\s*;', text, re.M)
-
-
class AntlrObjectiveCLexer(DelegatingLexer):
    """
    ANTLR with Objective-C Target

    .. versionadded:: 1.1
    """

    name = 'ANTLR With ObjectiveC Target'
    aliases = ['antlr-objc']
    filenames = ['*.G', '*.g']

    def __init__(self, **options):
        # Host code is lexed by ObjectiveCLexer; the grammar notation itself
        # by AntlrLexer.
        super().__init__(ObjectiveCLexer, AntlrLexer, **options)

    def analyse_text(text):
        # Fix: every sibling ANTLR target lexer passes re.M so the
        # ^-anchored "language = ...;" option matches at the start of any
        # line; this one was missing the flag and therefore only matched an
        # option on the very first line of the file.
        return AntlrLexer.analyse_text(text) and \
            re.search(r'^\s*language\s*=\s*ObjC\s*;', text, re.M)
-
-
class AntlrCSharpLexer(DelegatingLexer):
    """
    ANTLR grammar files with embedded C# actions.

    .. versionadded:: 1.1
    """

    name = 'ANTLR With C# Target'
    aliases = ['antlr-csharp', 'antlr-c#']
    filenames = ['*.G', '*.g']

    def __init__(self, **options):
        # C# handles the host code; AntlrLexer handles the grammar syntax.
        super().__init__(CSharpLexer, AntlrLexer, **options)

    def analyse_text(text):
        # Requires both the generic grammar heuristic and an explicit
        # "language = CSharp2;" target option.
        base = AntlrLexer.analyse_text(text)
        if not base:
            return base
        return re.search(r'^\s*language\s*=\s*CSharp2\s*;', text, re.M)
-
-
class AntlrPythonLexer(DelegatingLexer):
    """
    ANTLR grammar files with embedded Python actions.

    .. versionadded:: 1.1
    """

    name = 'ANTLR With Python Target'
    aliases = ['antlr-python']
    filenames = ['*.G', '*.g']

    def __init__(self, **options):
        # Python handles the host code; AntlrLexer handles the grammar
        # syntax.
        super().__init__(PythonLexer, AntlrLexer, **options)

    def analyse_text(text):
        # Both the generic ANTLR heuristic and an explicit
        # "language = Python;" option must be present.
        return (AntlrLexer.analyse_text(text) and
                re.search(r'^\s*language\s*=\s*Python\s*;', text, re.M))
-
-
class AntlrJavaLexer(DelegatingLexer):
    """
    ANTLR with Java Target

    .. versionadded:: 1.1
    """

    name = 'ANTLR With Java Target'
    aliases = ['antlr-java']
    filenames = ['*.G', '*.g']

    def __init__(self, **options):
        # Host code is lexed by JavaLexer; the grammar notation itself by
        # AntlrLexer.
        super().__init__(JavaLexer, AntlrLexer, **options)

    def analyse_text(text):
        # Antlr language is Java by default, so no "language = ...;" option
        # is required; return a slightly-below-certain confidence (0.9) so a
        # grammar with an explicit target declaration can be claimed by the
        # matching sibling lexer instead.
        return AntlrLexer.analyse_text(text) and 0.9
-
-
class AntlrRubyLexer(DelegatingLexer):
    """
    ANTLR grammar files with embedded Ruby actions.

    .. versionadded:: 1.1
    """

    name = 'ANTLR With Ruby Target'
    aliases = ['antlr-ruby', 'antlr-rb']
    filenames = ['*.G', '*.g']

    def __init__(self, **options):
        # Ruby handles the host code; AntlrLexer handles the grammar syntax.
        super().__init__(RubyLexer, AntlrLexer, **options)

    def analyse_text(text):
        # Requires both the generic grammar heuristic and an explicit
        # "language = Ruby;" target option.
        base = AntlrLexer.analyse_text(text)
        if not base:
            return base
        return re.search(r'^\s*language\s*=\s*Ruby\s*;', text, re.M)
-
-
class AntlrPerlLexer(DelegatingLexer):
    """
    ANTLR grammar files with embedded Perl actions.

    .. versionadded:: 1.1
    """

    name = 'ANTLR With Perl Target'
    aliases = ['antlr-perl']
    filenames = ['*.G', '*.g']

    def __init__(self, **options):
        # Perl handles the host code; AntlrLexer handles the grammar syntax.
        super().__init__(PerlLexer, AntlrLexer, **options)

    def analyse_text(text):
        # Both the generic ANTLR heuristic and an explicit
        # "language = Perl5;" option must be present.
        return (AntlrLexer.analyse_text(text) and
                re.search(r'^\s*language\s*=\s*Perl5\s*;', text, re.M))
-
-
class AntlrActionScriptLexer(DelegatingLexer):
    """
    ANTLR grammar files with embedded ActionScript actions.

    .. versionadded:: 1.1
    """

    name = 'ANTLR With ActionScript Target'
    aliases = ['antlr-actionscript', 'antlr-as']
    filenames = ['*.G', '*.g']

    def __init__(self, **options):
        # Imported here rather than at module top -- presumably to avoid an
        # import cycle between lexer modules; verify before hoisting.
        from pygments.lexers.actionscript import ActionScriptLexer
        super().__init__(ActionScriptLexer, AntlrLexer, **options)

    def analyse_text(text):
        # Requires both the generic grammar heuristic and an explicit
        # "language = ActionScript;" target option.
        base = AntlrLexer.analyse_text(text)
        if not base:
            return base
        return re.search(r'^\s*language\s*=\s*ActionScript\s*;', text, re.M)
-
-
class TreetopBaseLexer(RegexLexer):
    """
    A base lexer for `Treetop <https://github.com/cjheath/treetop>`_ grammars.
    Not for direct use; use :class:`TreetopLexer` instead.

    .. versionadded:: 1.6
    """

    tokens = {
        # Top level of a .treetop file: requires, then module/grammar blocks.
        'root': [
            include('space'),
            (r'require[ \t]+[^\n\r]+[\n\r]', Other),
            (r'module\b', Keyword.Namespace, 'module'),
            (r'grammar\b', Keyword, 'grammar'),
        ],
        # Inside "module Foo ... end"; modules may nest.
        'module': [
            include('space'),
            include('end'),
            (r'module\b', Keyword, '#push'),
            (r'grammar\b', Keyword, 'grammar'),
            (r'[A-Z]\w*(?:::[A-Z]\w*)*', Name.Namespace),
        ],
        # Inside "grammar Foo ... end": rules and includes.
        'grammar': [
            include('space'),
            include('end'),
            (r'rule\b', Keyword, 'rule'),
            (r'include\b', Keyword, 'include'),
            (r'[A-Z]\w*', Name),
        ],
        # Target of an "include" -- a (possibly namespaced) constant.
        'include': [
            include('space'),
            (r'[A-Z]\w*(?:::[A-Z]\w*)*', Name.Class, '#pop'),
        ],
        # Body of a rule: literals, labels, operators, character classes,
        # repetition ranges, node class annotations, and inline Ruby blocks.
        'rule': [
            include('space'),
            include('end'),
            (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
            (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
            (r'([A-Za-z_]\w*)(:)', bygroups(Name.Label, Punctuation)),
            (r'[A-Za-z_]\w*', Name),
            (r'[()]', Punctuation),
            (r'[?+*/&!~]', Operator),
            (r'\[(?:\\.|\[:\^?[a-z]+:\]|[^\\\]])+\]', String.Regex),
            (r'([0-9]*)(\.\.)([0-9]*)',
             bygroups(Number.Integer, Operator, Number.Integer)),
            (r'(<)([^>]+)(>)', bygroups(Punctuation, Name.Class, Punctuation)),
            (r'\{', Punctuation, 'inline_module'),
            (r'\.', String.Regex),
        ],
        # "{ ... }" after a rule element: host Ruby code, braces may nest.
        'inline_module': [
            (r'\{', Other, 'ruby'),
            (r'\}', Punctuation, '#pop'),
            (r'[^{}]+', Other),
        ],
        # Nested braces inside the inline Ruby block.
        'ruby': [
            (r'\{', Other, '#push'),
            (r'\}', Other, '#pop'),
            (r'[^{}]+', Other),
        ],
        'space': [
            (r'[ \t\n\r]+', Whitespace),
            (r'#[^\n]*', Comment.Single),
        ],
        # "end" closes the current module/grammar/rule state.
        'end': [
            (r'end\b', Keyword, '#pop'),
        ],
    }
-
-
class TreetopLexer(DelegatingLexer):
    """
    A lexer for `Treetop <https://github.com/cjheath/treetop>`_ grammars.

    .. versionadded:: 1.6
    """

    name = 'Treetop'
    aliases = ['treetop']
    filenames = ['*.treetop', '*.tt']

    def __init__(self, **options):
        # Ruby lexes the inline host-code sections; TreetopBaseLexer handles
        # the grammar structure around them.
        super().__init__(RubyLexer, TreetopBaseLexer, **options)
-
-
class EbnfLexer(RegexLexer):
    """
    Lexer for `ISO/IEC 14977 EBNF
    <https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form>`_
    grammars.

    .. versionadded:: 2.0
    """

    name = 'EBNF'
    aliases = ['ebnf']
    filenames = ['*.ebnf']
    mimetypes = ['text/x-ebnf']

    tokens = {
        # Top level: a meta-identifier followed by '=' opens a production.
        'root': [
            include('whitespace'),
            include('comment_start'),
            include('identifier'),
            (r'=', Operator, 'production'),
        ],
        # Right-hand side of a production, terminated by ';' or '.'.
        'production': [
            include('whitespace'),
            include('comment_start'),
            include('identifier'),
            (r'"[^"]*"', String.Double),
            (r"'[^']*'", String.Single),
            (r'(\?[^?]*\?)', Name.Entity),  # special sequence "? ... ?"
            (r'[\[\]{}(),|]', Punctuation),
            (r'-', Operator),
            (r';', Punctuation, '#pop'),
            (r'\.', Punctuation, '#pop'),
        ],
        'whitespace': [
            (r'\s+', Text),
        ],
        # "(*" opens a comment; kept separate so comments can nest.
        'comment_start': [
            (r'\(\*', Comment.Multiline, 'comment'),
        ],
        'comment': [
            (r'[^*)]', Comment.Multiline),
            include('comment_start'),  # nested "(* ... *)"
            (r'\*\)', Comment.Multiline, '#pop'),
            (r'[*)]', Comment.Multiline),
        ],
        # EBNF meta-identifiers may contain spaces and dashes.
        'identifier': [
            (r'([a-zA-Z][\w \-]*)', Keyword),
        ],
    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/pascal.py b/venv/lib/python3.11/site-packages/pygments/lexers/pascal.py
deleted file mode 100644
index 34df192..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/pascal.py
+++ /dev/null
@@ -1,641 +0,0 @@
-"""
-    pygments.lexers.pascal
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for Pascal family languages.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer
-from pygments.util import get_bool_opt, get_list_opt
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation, Error, Whitespace
-from pygments.scanner import Scanner
-
-# compatibility import
-from pygments.lexers.modula2 import Modula2Lexer
-
-__all__ = ['DelphiLexer', 'PortugolLexer']
-
-
class PortugolLexer(Lexer):
    """For Portugol, a Pascal dialect with keywords in Portuguese."""
    name = 'Portugol'
    aliases = ['portugol']
    filenames = ['*.alg', '*.portugol']
    mimetypes = []
    url = "https://www.apoioinformatica.inf.br/produtos/visualg/linguagem"

    def __init__(self, **options):
        super().__init__(**options)
        # All the real work is done by DelphiLexer with the `portugol`
        # option switched on.
        self.lexer = DelphiLexer(**options, portugol=True)

    def get_tokens_unprocessed(self, text):
        # Pure delegation to the wrapped DelphiLexer instance.
        return self.lexer.get_tokens_unprocessed(text)
-
-
-class DelphiLexer(Lexer):
-    """
-    For Delphi (Borland Object Pascal),
-    Turbo Pascal and Free Pascal source code.
-
-    Additional options accepted:
-
-    `turbopascal`
-        Highlight Turbo Pascal specific keywords (default: ``True``).
-    `delphi`
-        Highlight Borland Delphi specific keywords (default: ``True``).
-    `freepascal`
-        Highlight Free Pascal specific keywords (default: ``True``).
-    `units`
-        A list of units that should be considered builtin, supported are
-        ``System``, ``SysUtils``, ``Classes`` and ``Math``.
-        Default is to consider all of them builtin.
-    """
-    name = 'Delphi'
-    aliases = ['delphi', 'pas', 'pascal', 'objectpascal']
-    filenames = ['*.pas', '*.dpr']
-    mimetypes = ['text/x-pascal']
-
-    TURBO_PASCAL_KEYWORDS = (
-        'absolute', 'and', 'array', 'asm', 'begin', 'break', 'case',
-        'const', 'constructor', 'continue', 'destructor', 'div', 'do',
-        'downto', 'else', 'end', 'file', 'for', 'function', 'goto',
-        'if', 'implementation', 'in', 'inherited', 'inline', 'interface',
-        'label', 'mod', 'nil', 'not', 'object', 'of', 'on', 'operator',
-        'or', 'packed', 'procedure', 'program', 'record', 'reintroduce',
-        'repeat', 'self', 'set', 'shl', 'shr', 'string', 'then', 'to',
-        'type', 'unit', 'until', 'uses', 'var', 'while', 'with', 'xor'
-    )
-
-    DELPHI_KEYWORDS = (
-        'as', 'class', 'except', 'exports', 'finalization', 'finally',
-        'initialization', 'is', 'library', 'on', 'property', 'raise',
-        'threadvar', 'try'
-    )
-
-    FREE_PASCAL_KEYWORDS = (
-        'dispose', 'exit', 'false', 'new', 'true'
-    )
-
-    BLOCK_KEYWORDS = {
-        'begin', 'class', 'const', 'constructor', 'destructor', 'end',
-        'finalization', 'function', 'implementation', 'initialization',
-        'label', 'library', 'operator', 'procedure', 'program', 'property',
-        'record', 'threadvar', 'type', 'unit', 'uses', 'var'
-    }
-
-    FUNCTION_MODIFIERS = {
-        'alias', 'cdecl', 'export', 'inline', 'interrupt', 'nostackframe',
-        'pascal', 'register', 'safecall', 'softfloat', 'stdcall',
-        'varargs', 'name', 'dynamic', 'near', 'virtual', 'external',
-        'override', 'assembler'
-    }
-
-    # XXX: those aren't global. but currently we know no way for defining
-    #      them just for the type context.
-    DIRECTIVES = {
-        'absolute', 'abstract', 'assembler', 'cppdecl', 'default', 'far',
-        'far16', 'forward', 'index', 'oldfpccall', 'private', 'protected',
-        'published', 'public'
-    }
-
-    BUILTIN_TYPES = {
-        'ansichar', 'ansistring', 'bool', 'boolean', 'byte', 'bytebool',
-        'cardinal', 'char', 'comp', 'currency', 'double', 'dword',
-        'extended', 'int64', 'integer', 'iunknown', 'longbool', 'longint',
-        'longword', 'pansichar', 'pansistring', 'pbool', 'pboolean',
-        'pbyte', 'pbytearray', 'pcardinal', 'pchar', 'pcomp', 'pcurrency',
-        'pdate', 'pdatetime', 'pdouble', 'pdword', 'pextended', 'phandle',
-        'pint64', 'pinteger', 'plongint', 'plongword', 'pointer',
-        'ppointer', 'pshortint', 'pshortstring', 'psingle', 'psmallint',
-        'pstring', 'pvariant', 'pwidechar', 'pwidestring', 'pword',
-        'pwordarray', 'pwordbool', 'real', 'real48', 'shortint',
-        'shortstring', 'single', 'smallint', 'string', 'tclass', 'tdate',
-        'tdatetime', 'textfile', 'thandle', 'tobject', 'ttime', 'variant',
-        'widechar', 'widestring', 'word', 'wordbool'
-    }
-
-    BUILTIN_UNITS = {
-        'System': (
-            'abs', 'acquireexceptionobject', 'addr', 'ansitoutf8',
-            'append', 'arctan', 'assert', 'assigned', 'assignfile',
-            'beginthread', 'blockread', 'blockwrite', 'break', 'chdir',
-            'chr', 'close', 'closefile', 'comptocurrency', 'comptodouble',
-            'concat', 'continue', 'copy', 'cos', 'dec', 'delete',
-            'dispose', 'doubletocomp', 'endthread', 'enummodules',
-            'enumresourcemodules', 'eof', 'eoln', 'erase', 'exceptaddr',
-            'exceptobject', 'exclude', 'exit', 'exp', 'filepos', 'filesize',
-            'fillchar', 'finalize', 'findclasshinstance', 'findhinstance',
-            'findresourcehinstance', 'flush', 'frac', 'freemem',
-            'get8087cw', 'getdir', 'getlasterror', 'getmem',
-            'getmemorymanager', 'getmodulefilename', 'getvariantmanager',
-            'halt', 'hi', 'high', 'inc', 'include', 'initialize', 'insert',
-            'int', 'ioresult', 'ismemorymanagerset', 'isvariantmanagerset',
-            'length', 'ln', 'lo', 'low', 'mkdir', 'move', 'new', 'odd',
-            'olestrtostring', 'olestrtostrvar', 'ord', 'paramcount',
-            'paramstr', 'pi', 'pos', 'pred', 'ptr', 'pucs4chars', 'random',
-            'randomize', 'read', 'readln', 'reallocmem',
-            'releaseexceptionobject', 'rename', 'reset', 'rewrite', 'rmdir',
-            'round', 'runerror', 'seek', 'seekeof', 'seekeoln',
-            'set8087cw', 'setlength', 'setlinebreakstyle',
-            'setmemorymanager', 'setstring', 'settextbuf',
-            'setvariantmanager', 'sin', 'sizeof', 'slice', 'sqr', 'sqrt',
-            'str', 'stringofchar', 'stringtoolestr', 'stringtowidechar',
-            'succ', 'swap', 'trunc', 'truncate', 'typeinfo',
-            'ucs4stringtowidestring', 'unicodetoutf8', 'uniquestring',
-            'upcase', 'utf8decode', 'utf8encode', 'utf8toansi',
-            'utf8tounicode', 'val', 'vararrayredim', 'varclear',
-            'widecharlentostring', 'widecharlentostrvar',
-            'widechartostring', 'widechartostrvar',
-            'widestringtoucs4string', 'write', 'writeln'
-        ),
-        'SysUtils': (
-            'abort', 'addexitproc', 'addterminateproc', 'adjustlinebreaks',
-            'allocmem', 'ansicomparefilename', 'ansicomparestr',
-            'ansicomparetext', 'ansidequotedstr', 'ansiextractquotedstr',
-            'ansilastchar', 'ansilowercase', 'ansilowercasefilename',
-            'ansipos', 'ansiquotedstr', 'ansisamestr', 'ansisametext',
-            'ansistrcomp', 'ansistricomp', 'ansistrlastchar', 'ansistrlcomp',
-            'ansistrlicomp', 'ansistrlower', 'ansistrpos', 'ansistrrscan',
-            'ansistrscan', 'ansistrupper', 'ansiuppercase',
-            'ansiuppercasefilename', 'appendstr', 'assignstr', 'beep',
-            'booltostr', 'bytetocharindex', 'bytetocharlen', 'bytetype',
-            'callterminateprocs', 'changefileext', 'charlength',
-            'chartobyteindex', 'chartobytelen', 'comparemem', 'comparestr',
-            'comparetext', 'createdir', 'createguid', 'currentyear',
-            'currtostr', 'currtostrf', 'date', 'datetimetofiledate',
-            'datetimetostr', 'datetimetostring', 'datetimetosystemtime',
-            'datetimetotimestamp', 'datetostr', 'dayofweek', 'decodedate',
-            'decodedatefully', 'decodetime', 'deletefile', 'directoryexists',
-            'diskfree', 'disksize', 'disposestr', 'encodedate', 'encodetime',
-            'exceptionerrormessage', 'excludetrailingbackslash',
-            'excludetrailingpathdelimiter', 'expandfilename',
-            'expandfilenamecase', 'expanduncfilename', 'extractfiledir',
-            'extractfiledrive', 'extractfileext', 'extractfilename',
-            'extractfilepath', 'extractrelativepath', 'extractshortpathname',
-            'fileage', 'fileclose', 'filecreate', 'filedatetodatetime',
-            'fileexists', 'filegetattr', 'filegetdate', 'fileisreadonly',
-            'fileopen', 'fileread', 'filesearch', 'fileseek', 'filesetattr',
-            'filesetdate', 'filesetreadonly', 'filewrite', 'finalizepackage',
-            'findclose', 'findcmdlineswitch', 'findfirst', 'findnext',
-            'floattocurr', 'floattodatetime', 'floattodecimal', 'floattostr',
-            'floattostrf', 'floattotext', 'floattotextfmt', 'fmtloadstr',
-            'fmtstr', 'forcedirectories', 'format', 'formatbuf', 'formatcurr',
-            'formatdatetime', 'formatfloat', 'freeandnil', 'getcurrentdir',
-            'getenvironmentvariable', 'getfileversion', 'getformatsettings',
-            'getlocaleformatsettings', 'getmodulename', 'getpackagedescription',
-            'getpackageinfo', 'gettime', 'guidtostring', 'incamonth',
-            'includetrailingbackslash', 'includetrailingpathdelimiter',
-            'incmonth', 'initializepackage', 'interlockeddecrement',
-            'interlockedexchange', 'interlockedexchangeadd',
-            'interlockedincrement', 'inttohex', 'inttostr', 'isdelimiter',
-            'isequalguid', 'isleapyear', 'ispathdelimiter', 'isvalidident',
-            'languages', 'lastdelimiter', 'loadpackage', 'loadstr',
-            'lowercase', 'msecstotimestamp', 'newstr', 'nextcharindex', 'now',
-            'outofmemoryerror', 'quotedstr', 'raiselastoserror',
-            'raiselastwin32error', 'removedir', 'renamefile', 'replacedate',
-            'replacetime', 'safeloadlibrary', 'samefilename', 'sametext',
-            'setcurrentdir', 'showexception', 'sleep', 'stralloc', 'strbufsize',
-            'strbytetype', 'strcat', 'strcharlength', 'strcomp', 'strcopy',
-            'strdispose', 'strecopy', 'strend', 'strfmt', 'stricomp',
-            'stringreplace', 'stringtoguid', 'strlcat', 'strlcomp', 'strlcopy',
-            'strlen', 'strlfmt', 'strlicomp', 'strlower', 'strmove', 'strnew',
-            'strnextchar', 'strpas', 'strpcopy', 'strplcopy', 'strpos',
-            'strrscan', 'strscan', 'strtobool', 'strtobooldef', 'strtocurr',
-            'strtocurrdef', 'strtodate', 'strtodatedef', 'strtodatetime',
-            'strtodatetimedef', 'strtofloat', 'strtofloatdef', 'strtoint',
-            'strtoint64', 'strtoint64def', 'strtointdef', 'strtotime',
-            'strtotimedef', 'strupper', 'supports', 'syserrormessage',
-            'systemtimetodatetime', 'texttofloat', 'time', 'timestamptodatetime',
-            'timestamptomsecs', 'timetostr', 'trim', 'trimleft', 'trimright',
-            'tryencodedate', 'tryencodetime', 'tryfloattocurr', 'tryfloattodatetime',
-            'trystrtobool', 'trystrtocurr', 'trystrtodate', 'trystrtodatetime',
-            'trystrtofloat', 'trystrtoint', 'trystrtoint64', 'trystrtotime',
-            'unloadpackage', 'uppercase', 'widecomparestr', 'widecomparetext',
-            'widefmtstr', 'wideformat', 'wideformatbuf', 'widelowercase',
-            'widesamestr', 'widesametext', 'wideuppercase', 'win32check',
-            'wraptext'
-        ),
-        'Classes': (
-            'activateclassgroup', 'allocatehwnd', 'bintohex', 'checksynchronize',
-            'collectionsequal', 'countgenerations', 'deallocatehwnd', 'equalrect',
-            'extractstrings', 'findclass', 'findglobalcomponent', 'getclass',
-            'groupdescendantswith', 'hextobin', 'identtoint',
-            'initinheritedcomponent', 'inttoident', 'invalidpoint',
-            'isuniqueglobalcomponentname', 'linestart', 'objectbinarytotext',
-            'objectresourcetotext', 'objecttexttobinary', 'objecttexttoresource',
-            'pointsequal', 'readcomponentres', 'readcomponentresex',
-            'readcomponentresfile', 'rect', 'registerclass', 'registerclassalias',
-            'registerclasses', 'registercomponents', 'registerintegerconsts',
-            'registernoicon', 'registernonactivex', 'smallpoint', 'startclassgroup',
-            'teststreamformat', 'unregisterclass', 'unregisterclasses',
-            'unregisterintegerconsts', 'unregistermoduleclasses',
-            'writecomponentresfile'
-        ),
-        'Math': (
-            'arccos', 'arccosh', 'arccot', 'arccoth', 'arccsc', 'arccsch', 'arcsec',
-            'arcsech', 'arcsin', 'arcsinh', 'arctan2', 'arctanh', 'ceil',
-            'comparevalue', 'cosecant', 'cosh', 'cot', 'cotan', 'coth', 'csc',
-            'csch', 'cycletodeg', 'cycletograd', 'cycletorad', 'degtocycle',
-            'degtograd', 'degtorad', 'divmod', 'doubledecliningbalance',
-            'ensurerange', 'floor', 'frexp', 'futurevalue', 'getexceptionmask',
-            'getprecisionmode', 'getroundmode', 'gradtocycle', 'gradtodeg',
-            'gradtorad', 'hypot', 'inrange', 'interestpayment', 'interestrate',
-            'internalrateofreturn', 'intpower', 'isinfinite', 'isnan', 'iszero',
-            'ldexp', 'lnxp1', 'log10', 'log2', 'logn', 'max', 'maxintvalue',
-            'maxvalue', 'mean', 'meanandstddev', 'min', 'minintvalue', 'minvalue',
-            'momentskewkurtosis', 'netpresentvalue', 'norm', 'numberofperiods',
-            'payment', 'periodpayment', 'poly', 'popnstddev', 'popnvariance',
-            'power', 'presentvalue', 'radtocycle', 'radtodeg', 'radtograd',
-            'randg', 'randomrange', 'roundto', 'samevalue', 'sec', 'secant',
-            'sech', 'setexceptionmask', 'setprecisionmode', 'setroundmode',
-            'sign', 'simpleroundto', 'sincos', 'sinh', 'slndepreciation', 'stddev',
-            'sum', 'sumint', 'sumofsquares', 'sumsandsquares', 'syddepreciation',
-            'tan', 'tanh', 'totalvariance', 'variance'
-        )
-    }
-
-    ASM_REGISTERS = {
-        'ah', 'al', 'ax', 'bh', 'bl', 'bp', 'bx', 'ch', 'cl', 'cr0',
-        'cr1', 'cr2', 'cr3', 'cr4', 'cs', 'cx', 'dh', 'di', 'dl', 'dr0',
-        'dr1', 'dr2', 'dr3', 'dr4', 'dr5', 'dr6', 'dr7', 'ds', 'dx',
-        'eax', 'ebp', 'ebx', 'ecx', 'edi', 'edx', 'es', 'esi', 'esp',
-        'fs', 'gs', 'mm0', 'mm1', 'mm2', 'mm3', 'mm4', 'mm5', 'mm6',
-        'mm7', 'si', 'sp', 'ss', 'st0', 'st1', 'st2', 'st3', 'st4', 'st5',
-        'st6', 'st7', 'xmm0', 'xmm1', 'xmm2', 'xmm3', 'xmm4', 'xmm5',
-        'xmm6', 'xmm7'
-    }
-
-    ASM_INSTRUCTIONS = {
-        'aaa', 'aad', 'aam', 'aas', 'adc', 'add', 'and', 'arpl', 'bound',
-        'bsf', 'bsr', 'bswap', 'bt', 'btc', 'btr', 'bts', 'call', 'cbw',
-        'cdq', 'clc', 'cld', 'cli', 'clts', 'cmc', 'cmova', 'cmovae',
-        'cmovb', 'cmovbe', 'cmovc', 'cmovcxz', 'cmove', 'cmovg',
-        'cmovge', 'cmovl', 'cmovle', 'cmovna', 'cmovnae', 'cmovnb',
-        'cmovnbe', 'cmovnc', 'cmovne', 'cmovng', 'cmovnge', 'cmovnl',
-        'cmovnle', 'cmovno', 'cmovnp', 'cmovns', 'cmovnz', 'cmovo',
-        'cmovp', 'cmovpe', 'cmovpo', 'cmovs', 'cmovz', 'cmp', 'cmpsb',
-        'cmpsd', 'cmpsw', 'cmpxchg', 'cmpxchg486', 'cmpxchg8b', 'cpuid',
-        'cwd', 'cwde', 'daa', 'das', 'dec', 'div', 'emms', 'enter', 'hlt',
-        'ibts', 'icebp', 'idiv', 'imul', 'in', 'inc', 'insb', 'insd',
-        'insw', 'int', 'int01', 'int03', 'int1', 'int3', 'into', 'invd',
-        'invlpg', 'iret', 'iretd', 'iretw', 'ja', 'jae', 'jb', 'jbe',
-        'jc', 'jcxz', 'jcxz', 'je', 'jecxz', 'jg', 'jge', 'jl', 'jle',
-        'jmp', 'jna', 'jnae', 'jnb', 'jnbe', 'jnc', 'jne', 'jng', 'jnge',
-        'jnl', 'jnle', 'jno', 'jnp', 'jns', 'jnz', 'jo', 'jp', 'jpe',
-        'jpo', 'js', 'jz', 'lahf', 'lar', 'lcall', 'lds', 'lea', 'leave',
-        'les', 'lfs', 'lgdt', 'lgs', 'lidt', 'ljmp', 'lldt', 'lmsw',
-        'loadall', 'loadall286', 'lock', 'lodsb', 'lodsd', 'lodsw',
-        'loop', 'loope', 'loopne', 'loopnz', 'loopz', 'lsl', 'lss', 'ltr',
-        'mov', 'movd', 'movq', 'movsb', 'movsd', 'movsw', 'movsx',
-        'movzx', 'mul', 'neg', 'nop', 'not', 'or', 'out', 'outsb', 'outsd',
-        'outsw', 'pop', 'popa', 'popad', 'popaw', 'popf', 'popfd', 'popfw',
-        'push', 'pusha', 'pushad', 'pushaw', 'pushf', 'pushfd', 'pushfw',
-        'rcl', 'rcr', 'rdmsr', 'rdpmc', 'rdshr', 'rdtsc', 'rep', 'repe',
-        'repne', 'repnz', 'repz', 'ret', 'retf', 'retn', 'rol', 'ror',
-        'rsdc', 'rsldt', 'rsm', 'sahf', 'sal', 'salc', 'sar', 'sbb',
-        'scasb', 'scasd', 'scasw', 'seta', 'setae', 'setb', 'setbe',
-        'setc', 'setcxz', 'sete', 'setg', 'setge', 'setl', 'setle',
-        'setna', 'setnae', 'setnb', 'setnbe', 'setnc', 'setne', 'setng',
-        'setnge', 'setnl', 'setnle', 'setno', 'setnp', 'setns', 'setnz',
-        'seto', 'setp', 'setpe', 'setpo', 'sets', 'setz', 'sgdt', 'shl',
-        'shld', 'shr', 'shrd', 'sidt', 'sldt', 'smi', 'smint', 'smintold',
-        'smsw', 'stc', 'std', 'sti', 'stosb', 'stosd', 'stosw', 'str',
-        'sub', 'svdc', 'svldt', 'svts', 'syscall', 'sysenter', 'sysexit',
-        'sysret', 'test', 'ud1', 'ud2', 'umov', 'verr', 'verw', 'wait',
-        'wbinvd', 'wrmsr', 'wrshr', 'xadd', 'xbts', 'xchg', 'xlat',
-        'xlatb', 'xor'
-    }
-
-    PORTUGOL_KEYWORDS = (
-        'aleatorio',
-        'algoritmo',
-        'arquivo',
-        'ate',
-        'caso',
-        'cronometro',
-        'debug',
-        'e',
-        'eco',
-        'enquanto',
-        'entao',
-        'escolha',
-        'escreva',
-        'escreval',
-        'faca',
-        'falso',
-        'fimalgoritmo',
-        'fimenquanto',
-        'fimescolha',
-        'fimfuncao',
-        'fimpara',
-        'fimprocedimento',
-        'fimrepita',
-        'fimse',
-        'funcao',
-        'inicio',
-        'int',
-        'interrompa',
-        'leia',
-        'limpatela',
-        'mod',
-        'nao',
-        'ou',
-        'outrocaso',
-        'para',
-        'passo',
-        'pausa',
-        'procedimento',
-        'repita',
-        'retorne',
-        'se',
-        'senao',
-        'timer',
-        'var',
-        'vetor',
-        'verdadeiro',
-        'xou',
-        'div',
-        'mod',
-        'abs',
-        'arccos',
-        'arcsen',
-        'arctan',
-        'cos',
-        'cotan',
-        'Exp',
-        'grauprad',
-        'int',
-        'log',
-        'logn',
-        'pi',
-        'quad',
-        'radpgrau',
-        'raizq',
-        'rand',
-        'randi',
-        'sen',
-        'Tan',
-        'asc',
-        'carac',
-        'caracpnum',
-        'compr',
-        'copia',
-        'maiusc',
-        'minusc',
-        'numpcarac',
-        'pos',
-    )
-
-    PORTUGOL_BUILTIN_TYPES = {
-        'inteiro', 'real', 'caractere', 'logico'
-    }
-
-    def __init__(self, **options):
-        Lexer.__init__(self, **options)
-        self.keywords = set()
-        self.builtins = set()
-        if get_bool_opt(options, 'portugol', False):
-            self.keywords.update(self.PORTUGOL_KEYWORDS)
-            self.builtins.update(self.PORTUGOL_BUILTIN_TYPES)
-            self.is_portugol = True
-        else:
-            self.is_portugol = False
-
-            if get_bool_opt(options, 'turbopascal', True):
-                self.keywords.update(self.TURBO_PASCAL_KEYWORDS)
-            if get_bool_opt(options, 'delphi', True):
-                self.keywords.update(self.DELPHI_KEYWORDS)
-            if get_bool_opt(options, 'freepascal', True):
-                self.keywords.update(self.FREE_PASCAL_KEYWORDS)
-            for unit in get_list_opt(options, 'units', list(self.BUILTIN_UNITS)):
-                self.builtins.update(self.BUILTIN_UNITS[unit])
-
-    def get_tokens_unprocessed(self, text):
-        scanner = Scanner(text, re.DOTALL | re.MULTILINE | re.IGNORECASE)
-        stack = ['initial']
-        in_function_block = False
-        in_property_block = False
-        was_dot = False
-        next_token_is_function = False
-        next_token_is_property = False
-        collect_labels = False
-        block_labels = set()
-        brace_balance = [0, 0]
-
-        while not scanner.eos:
-            token = Error
-
-            if stack[-1] == 'initial':
-                if scanner.scan(r'\s+'):
-                    token = Whitespace
-                elif not self.is_portugol and scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
-                    if scanner.match.startswith('$'):
-                        token = Comment.Preproc
-                    else:
-                        token = Comment.Multiline
-                elif scanner.scan(r'//.*?$'):
-                    token = Comment.Single
-                elif self.is_portugol and scanner.scan(r'(<\-)|(>=)|(<=)|%|<|>|-|\+|\*|\=|(<>)|\/|\.|:|,'):
-                    token = Operator
-                elif not self.is_portugol and scanner.scan(r'[-+*\/=<>:;,.@\^]'):
-                    token = Operator
-                    # stop label highlighting on next ";"
-                    if collect_labels and scanner.match == ';':
-                        collect_labels = False
-                elif scanner.scan(r'[\(\)\[\]]+'):
-                    token = Punctuation
-                    # abort function naming ``foo = Function(...)``
-                    next_token_is_function = False
-                    # if we are in a function block we count the open
-                    # braces because ootherwise it's impossible to
-                    # determine the end of the modifier context
-                    if in_function_block or in_property_block:
-                        if scanner.match == '(':
-                            brace_balance[0] += 1
-                        elif scanner.match == ')':
-                            brace_balance[0] -= 1
-                        elif scanner.match == '[':
-                            brace_balance[1] += 1
-                        elif scanner.match == ']':
-                            brace_balance[1] -= 1
-                elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
-                    lowercase_name = scanner.match.lower()
-                    if lowercase_name == 'result':
-                        token = Name.Builtin.Pseudo
-                    elif lowercase_name in self.keywords:
-                        token = Keyword
-                        # if we are in a special block and a
-                        # block ending keyword occurs (and the parenthesis
-                        # is balanced) we end the current block context
-                        if self.is_portugol:
-                            if lowercase_name in ('funcao', 'procedimento'):
-                                in_function_block = True
-                                next_token_is_function = True
-                        else:
-                            if (in_function_block or in_property_block) and \
-                                    lowercase_name in self.BLOCK_KEYWORDS and \
-                                    brace_balance[0] <= 0 and \
-                                    brace_balance[1] <= 0:
-                                in_function_block = False
-                                in_property_block = False
-                                brace_balance = [0, 0]
-                                block_labels = set()
-                            if lowercase_name in ('label', 'goto'):
-                                collect_labels = True
-                            elif lowercase_name == 'asm':
-                                stack.append('asm')
-                            elif lowercase_name == 'property':
-                                in_property_block = True
-                                next_token_is_property = True
-                            elif lowercase_name in ('procedure', 'operator',
-                                                    'function', 'constructor',
-                                                    'destructor'):
-                                in_function_block = True
-                                next_token_is_function = True
-                    # we are in a function block and the current name
-                    # is in the set of registered modifiers. highlight
-                    # it as pseudo keyword
-                    elif not self.is_portugol and in_function_block and \
-                            lowercase_name in self.FUNCTION_MODIFIERS:
-                        token = Keyword.Pseudo
-                    # if we are in a property highlight some more
-                    # modifiers
-                    elif not self.is_portugol and in_property_block and \
-                            lowercase_name in ('read', 'write'):
-                        token = Keyword.Pseudo
-                        next_token_is_function = True
-                    # if the last iteration set next_token_is_function
-                    # to true we now want this name highlighted as
-                    # function. so do that and reset the state
-                    elif next_token_is_function:
-                        # Look if the next token is a dot. If yes it's
-                        # not a function, but a class name and the
-                        # part after the dot a function name
-                        if not self.is_portugol and scanner.test(r'\s*\.\s*'):
-                            token = Name.Class
-                        # it's not a dot, our job is done
-                        else:
-                            token = Name.Function
-                            next_token_is_function = False
-
-                            if self.is_portugol:
-                                block_labels.add(scanner.match.lower())
-
-                    # same for properties
-                    elif not self.is_portugol and next_token_is_property:
-                        token = Name.Property
-                        next_token_is_property = False
-                    # Highlight this token as label and add it
-                    # to the list of known labels
-                    elif not self.is_portugol and collect_labels:
-                        token = Name.Label
-                        block_labels.add(scanner.match.lower())
-                    # name is in list of known labels
-                    elif lowercase_name in block_labels:
-                        token = Name.Label
-                    elif self.is_portugol and lowercase_name in self.PORTUGOL_BUILTIN_TYPES:
-                        token = Keyword.Type
-                    elif not self.is_portugol and lowercase_name in self.BUILTIN_TYPES:
-                        token = Keyword.Type
-                    elif not self.is_portugol and lowercase_name in self.DIRECTIVES:
-                        token = Keyword.Pseudo
-                    # builtins are just builtins if the token
-                    # before isn't a dot
-                    elif not self.is_portugol and not was_dot and lowercase_name in self.builtins:
-                        token = Name.Builtin
-                    else:
-                        token = Name
-                elif self.is_portugol and scanner.scan(r"\""):
-                    token = String
-                    stack.append('string')
-                elif not self.is_portugol and scanner.scan(r"'"):
-                    token = String
-                    stack.append('string')
-                elif not self.is_portugol and scanner.scan(r'\#(\d+|\$[0-9A-Fa-f]+)'):
-                    token = String.Char
-                elif not self.is_portugol and scanner.scan(r'\$[0-9A-Fa-f]+'):
-                    token = Number.Hex
-                elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
-                    token = Number.Integer
-                elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
-                    token = Number.Float
-                else:
-                    # if the stack depth is deeper than once, pop
-                    if len(stack) > 1:
-                        stack.pop()
-                    scanner.get_char()
-
-            elif stack[-1] == 'string':
-                if self.is_portugol:
-                    if scanner.scan(r"''"):
-                        token = String.Escape
-                    elif scanner.scan(r"\""):
-                        token = String
-                        stack.pop()
-                    elif scanner.scan(r"[^\"]*"):
-                        token = String
-                    else:
-                        scanner.get_char()
-                        stack.pop()
-                else:
-                    if scanner.scan(r"''"):
-                        token = String.Escape
-                    elif scanner.scan(r"'"):
-                        token = String
-                        stack.pop()
-                    elif scanner.scan(r"[^']*"):
-                        token = String
-                    else:
-                        scanner.get_char()
-                        stack.pop()
-            elif not self.is_portugol and stack[-1] == 'asm':
-                if scanner.scan(r'\s+'):
-                    token = Whitespace
-                elif scanner.scan(r'end'):
-                    token = Keyword
-                    stack.pop()
-                elif scanner.scan(r'\{.*?\}|\(\*.*?\*\)'):
-                    if scanner.match.startswith('$'):
-                        token = Comment.Preproc
-                    else:
-                        token = Comment.Multiline
-                elif scanner.scan(r'//.*?$'):
-                    token = Comment.Single
-                elif scanner.scan(r"'"):
-                    token = String
-                    stack.append('string')
-                elif scanner.scan(r'@@[A-Za-z_][A-Za-z_0-9]*'):
-                    token = Name.Label
-                elif scanner.scan(r'[A-Za-z_][A-Za-z_0-9]*'):
-                    lowercase_name = scanner.match.lower()
-                    if lowercase_name in self.ASM_INSTRUCTIONS:
-                        token = Keyword
-                    elif lowercase_name in self.ASM_REGISTERS:
-                        token = Name.Builtin
-                    else:
-                        token = Name
-                elif scanner.scan(r'[-+*\/=<>:;,.@\^]+'):
-                    token = Operator
-                elif scanner.scan(r'[\(\)\[\]]+'):
-                    token = Punctuation
-                elif scanner.scan(r'\$[0-9A-Fa-f]+'):
-                    token = Number.Hex
-                elif scanner.scan(r'\d+(?![eE]|\.[^.])'):
-                    token = Number.Integer
-                elif scanner.scan(r'\d+(\.\d+([eE][+-]?\d+)?|[eE][+-]?\d+)'):
-                    token = Number.Float
-                else:
-                    scanner.get_char()
-                    stack.pop()
-
-            # save the dot!!!11
-            if not self.is_portugol and scanner.match.strip():
-                was_dot = scanner.match == '.'
-
-            yield scanner.start_pos, token, scanner.match or ''
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/pawn.py b/venv/lib/python3.11/site-packages/pygments/lexers/pawn.py
deleted file mode 100644
index 36b48fc..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/pawn.py
+++ /dev/null
@@ -1,202 +0,0 @@
-"""
-    pygments.lexers.pawn
-    ~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for the Pawn languages.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation
-from pygments.util import get_bool_opt
-
-__all__ = ['SourcePawnLexer', 'PawnLexer']
-
-
-class SourcePawnLexer(RegexLexer):
-    """
-    For SourcePawn source code with preprocessor directives.
-
-    .. versionadded:: 1.6
-    """
-    name = 'SourcePawn'
-    aliases = ['sp']
-    filenames = ['*.sp']
-    mimetypes = ['text/x-sourcepawn']
-
-    #: optional Comment or Whitespace
-    _ws = r'(?:\s|//.*?\n|/\*.*?\*/)+'
-    #: only one /* */ style comment
-    _ws1 = r'\s*(?:/[*].*?[*]/\s*)*'
-
-    tokens = {
-        'root': [
-            # preprocessor directives: without whitespace
-            (r'^#if\s+0', Comment.Preproc, 'if0'),
-            ('^#', Comment.Preproc, 'macro'),
-            # or with whitespace
-            ('^' + _ws1 + r'#if\s+0', Comment.Preproc, 'if0'),
-            ('^' + _ws1 + '#', Comment.Preproc, 'macro'),
-            (r'\n', Text),
-            (r'\s+', Text),
-            (r'\\\n', Text),  # line continuation
-            (r'/(\\\n)?/(\n|(.|\n)*?[^\\]\n)', Comment.Single),
-            (r'/(\\\n)?\*(.|\n)*?\*(\\\n)?/', Comment.Multiline),
-            (r'[{}]', Punctuation),
-            (r'L?"', String, 'string'),
-            (r"L?'(\\.|\\[0-7]{1,3}|\\x[a-fA-F0-9]{1,2}|[^\\\'\n])'", String.Char),
-            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+[LlUu]*', Number.Float),
-            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
-            (r'0x[0-9a-fA-F]+[LlUu]*', Number.Hex),
-            (r'0[0-7]+[LlUu]*', Number.Oct),
-            (r'\d+[LlUu]*', Number.Integer),
-            (r'[~!%^&*+=|?:<>/-]', Operator),
-            (r'[()\[\],.;]', Punctuation),
-            (r'(case|const|continue|native|'
-             r'default|else|enum|for|if|new|operator|'
-             r'public|return|sizeof|static|decl|struct|switch)\b', Keyword),
-            (r'(bool|Float)\b', Keyword.Type),
-            (r'(true|false)\b', Keyword.Constant),
-            (r'[a-zA-Z_]\w*', Name),
-        ],
-        'string': [
-            (r'"', String, '#pop'),
-            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
-            (r'[^\\"\n]+', String),  # all other characters
-            (r'\\\n', String),       # line continuation
-            (r'\\', String),         # stray backslash
-        ],
-        'macro': [
-            (r'[^/\n]+', Comment.Preproc),
-            (r'/\*(.|\n)*?\*/', Comment.Multiline),
-            (r'//.*?\n', Comment.Single, '#pop'),
-            (r'/', Comment.Preproc),
-            (r'(?<=\\)\n', Comment.Preproc),
-            (r'\n', Comment.Preproc, '#pop'),
-        ],
-        'if0': [
-            (r'^\s*#if.*?(?/-]', Operator),
-            (r'[()\[\],.;]', Punctuation),
-            (r'(switch|case|default|const|new|static|char|continue|break|'
-             r'if|else|for|while|do|operator|enum|'
-             r'public|return|sizeof|tagof|state|goto)\b', Keyword),
-            (r'(bool|Float)\b', Keyword.Type),
-            (r'(true|false)\b', Keyword.Constant),
-            (r'[a-zA-Z_]\w*', Name),
-        ],
-        'string': [
-            (r'"', String, '#pop'),
-            (r'\\([\\abfnrtv"\']|x[a-fA-F0-9]{2,4}|[0-7]{1,3})', String.Escape),
-            (r'[^\\"\n]+', String),  # all other characters
-            (r'\\\n', String),       # line continuation
-            (r'\\', String),         # stray backslash
-        ],
-        'macro': [
-            (r'[^/\n]+', Comment.Preproc),
-            (r'/\*(.|\n)*?\*/', Comment.Multiline),
-            (r'//.*?\n', Comment.Single, '#pop'),
-            (r'/', Comment.Preproc),
-            (r'(?<=\\)\n', Comment.Preproc),
-            (r'\n', Comment.Preproc, '#pop'),
-        ],
-        'if0': [
-            (r'^\s*#if.*?(?])*>[egimosx]*', String.Regex, '#pop'),
-            (r'\[(\\\\|\\[^\\]|[^\\\]])*\][egimosx]*', String.Regex, '#pop'),
-            (r'\((\\\\|\\[^\\]|[^\\)])*\)[egimosx]*', String.Regex, '#pop'),
-            (r'@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*', String.Regex, '#pop'),
-            (r'%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*', String.Regex, '#pop'),
-            (r'\$(\\\\|\\[^\\]|[^\\$])*\$[egimosx]*', String.Regex, '#pop'),
-        ],
-        'root': [
-            (r'\A\#!.+?$', Comment.Hashbang),
-            (r'\#.*?$', Comment.Single),
-            (r'^=[a-zA-Z0-9]+\s+.*?\n=cut', Comment.Multiline),
-            (words((
-                'case', 'continue', 'do', 'else', 'elsif', 'for', 'foreach',
-                'if', 'last', 'my', 'next', 'our', 'redo', 'reset', 'then',
-                'unless', 'until', 'while', 'print', 'new', 'BEGIN',
-                'CHECK', 'INIT', 'END', 'return'), suffix=r'\b'),
-             Keyword),
-            (r'(format)(\s+)(\w+)(\s*)(=)(\s*\n)',
-             bygroups(Keyword, Whitespace, Name, Whitespace, Punctuation, Whitespace), 'format'),
-            (r'(eq|lt|gt|le|ge|ne|not|and|or|cmp)\b', Operator.Word),
-            # common delimiters
-            (r's/(\\\\|\\[^\\]|[^\\/])*/(\\\\|\\[^\\]|[^\\/])*/[egimosx]*',
-                String.Regex),
-            (r's!(\\\\|\\!|[^!])*!(\\\\|\\!|[^!])*![egimosx]*', String.Regex),
-            (r's\\(\\\\|[^\\])*\\(\\\\|[^\\])*\\[egimosx]*', String.Regex),
-            (r's@(\\\\|\\[^\\]|[^\\@])*@(\\\\|\\[^\\]|[^\\@])*@[egimosx]*',
-                String.Regex),
-            (r's%(\\\\|\\[^\\]|[^\\%])*%(\\\\|\\[^\\]|[^\\%])*%[egimosx]*',
-                String.Regex),
-            # balanced delimiters
-            (r's\{(\\\\|\\[^\\]|[^\\}])*\}\s*', String.Regex, 'balanced-regex'),
-            (r's<(\\\\|\\[^\\]|[^\\>])*>\s*', String.Regex, 'balanced-regex'),
-            (r's\[(\\\\|\\[^\\]|[^\\\]])*\]\s*', String.Regex,
-                'balanced-regex'),
-            (r's\((\\\\|\\[^\\]|[^\\)])*\)\s*', String.Regex,
-                'balanced-regex'),
-
-            (r'm?/(\\\\|\\[^\\]|[^\\/\n])*/[gcimosx]*', String.Regex),
-            (r'm(?=[/!\\{<\[(@%$])', String.Regex, 'balanced-regex'),
-            (r'((?<==~)|(?<=\())\s*/(\\\\|\\[^\\]|[^\\/])*/[gcimosx]*',
-                String.Regex),
-            (r'\s+', Whitespace),
-            (words((
-                'abs', 'accept', 'alarm', 'atan2', 'bind', 'binmode', 'bless', 'caller', 'chdir',
-                'chmod', 'chomp', 'chop', 'chown', 'chr', 'chroot', 'close', 'closedir', 'connect',
-                'continue', 'cos', 'crypt', 'dbmclose', 'dbmopen', 'defined', 'delete', 'die',
-                'dump', 'each', 'endgrent', 'endhostent', 'endnetent', 'endprotoent',
-                'endpwent', 'endservent', 'eof', 'eval', 'exec', 'exists', 'exit', 'exp', 'fcntl',
-                'fileno', 'flock', 'fork', 'format', 'formline', 'getc', 'getgrent', 'getgrgid',
-                'getgrnam', 'gethostbyaddr', 'gethostbyname', 'gethostent', 'getlogin',
-                'getnetbyaddr', 'getnetbyname', 'getnetent', 'getpeername', 'getpgrp',
-                'getppid', 'getpriority', 'getprotobyname', 'getprotobynumber',
-                'getprotoent', 'getpwent', 'getpwnam', 'getpwuid', 'getservbyname',
-                'getservbyport', 'getservent', 'getsockname', 'getsockopt', 'glob', 'gmtime',
-                'goto', 'grep', 'hex', 'import', 'index', 'int', 'ioctl', 'join', 'keys', 'kill', 'last',
-                'lc', 'lcfirst', 'length', 'link', 'listen', 'local', 'localtime', 'log', 'lstat',
-                'map', 'mkdir', 'msgctl', 'msgget', 'msgrcv', 'msgsnd', 'my', 'next', 'oct', 'open',
-                'opendir', 'ord', 'our', 'pack', 'pipe', 'pop', 'pos', 'printf',
-                'prototype', 'push', 'quotemeta', 'rand', 'read', 'readdir',
-                'readline', 'readlink', 'readpipe', 'recv', 'redo', 'ref', 'rename',
-                'reverse', 'rewinddir', 'rindex', 'rmdir', 'scalar', 'seek', 'seekdir',
-                'select', 'semctl', 'semget', 'semop', 'send', 'setgrent', 'sethostent', 'setnetent',
-                'setpgrp', 'setpriority', 'setprotoent', 'setpwent', 'setservent',
-                'setsockopt', 'shift', 'shmctl', 'shmget', 'shmread', 'shmwrite', 'shutdown',
-                'sin', 'sleep', 'socket', 'socketpair', 'sort', 'splice', 'split', 'sprintf', 'sqrt',
-                'srand', 'stat', 'study', 'substr', 'symlink', 'syscall', 'sysopen', 'sysread',
-                'sysseek', 'system', 'syswrite', 'tell', 'telldir', 'tie', 'tied', 'time', 'times', 'tr',
-                'truncate', 'uc', 'ucfirst', 'umask', 'undef', 'unlink', 'unpack', 'unshift', 'untie',
-                'utime', 'values', 'vec', 'wait', 'waitpid', 'wantarray', 'warn', 'write'), suffix=r'\b'),
-             Name.Builtin),
-            (r'((__(DATA|DIE|WARN)__)|(STD(IN|OUT|ERR)))\b', Name.Builtin.Pseudo),
-            (r'(<<)([\'"]?)([a-zA-Z_]\w*)(\2;?\n.*?\n)(\3)(\n)',
-             bygroups(String, String, String.Delimiter, String, String.Delimiter, Whitespace)),
-            (r'__END__', Comment.Preproc, 'end-part'),
-            (r'\$\^[ADEFHILMOPSTWX]', Name.Variable.Global),
-            (r"\$[\\\"\[\]'&`+*.,;=%~?@$!<>(^|/-](?!\w)", Name.Variable.Global),
-            (r'[$@%#]+', Name.Variable, 'varname'),
-            (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
-            (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
-            (r'0b[01]+(_[01]+)*', Number.Bin),
-            (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
-             Number.Float),
-            (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
-            (r'\d+(_\d+)*', Number.Integer),
-            (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
-            (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
-            (r'`(\\\\|\\[^\\]|[^`\\])*`', String.Backtick),
-            (r'<([^\s>]+)>', String.Regex),
-            (r'(q|qq|qw|qr|qx)\{', String.Other, 'cb-string'),
-            (r'(q|qq|qw|qr|qx)\(', String.Other, 'rb-string'),
-            (r'(q|qq|qw|qr|qx)\[', String.Other, 'sb-string'),
-            (r'(q|qq|qw|qr|qx)\<', String.Other, 'lt-string'),
-            (r'(q|qq|qw|qr|qx)([\W_])(.|\n)*?\2', String.Other),
-            (r'(package)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)',
-             bygroups(Keyword, Whitespace, Name.Namespace)),
-            (r'(use|require|no)(\s+)([a-zA-Z_]\w*(?:::[a-zA-Z_]\w*)*)',
-             bygroups(Keyword, Whitespace, Name.Namespace)),
-            (r'(sub)(\s+)', bygroups(Keyword, Whitespace), 'funcname'),
-            (words((
-                'no', 'package', 'require', 'use'), suffix=r'\b'),
-             Keyword),
-            (r'(\[\]|\*\*|::|<<|>>|>=|<=>|<=|={3}|!=|=~|'
-             r'!~|&&?|\|\||\.{1,3})', Operator),
-            (r'[-+/*%=<>&^|!\\~]=?', Operator),
-            (r'[()\[\]:;,<>/?{}]', Punctuation),  # yes, there's no shortage
-                                                  # of punctuation in Perl!
-            (r'(?=\w)', Name, 'name'),
-        ],
-        'format': [
-            (r'\.\n', String.Interpol, '#pop'),
-            (r'[^\n]*\n', String.Interpol),
-        ],
-        'varname': [
-            (r'\s+', Whitespace),
-            (r'\{', Punctuation, '#pop'),    # hash syntax?
-            (r'\)|,', Punctuation, '#pop'),  # argument specifier
-            (r'\w+::', Name.Namespace),
-            (r'[\w:]+', Name.Variable, '#pop'),
-        ],
-        'name': [
-            (r'[a-zA-Z_]\w*(::[a-zA-Z_]\w*)*(::)?(?=\s*->)', Name.Namespace, '#pop'),
-            (r'[a-zA-Z_]\w*(::[a-zA-Z_]\w*)*::', Name.Namespace, '#pop'),
-            (r'[\w:]+', Name, '#pop'),
-            (r'[A-Z_]+(?=\W)', Name.Constant, '#pop'),
-            (r'(?=\W)', Text, '#pop'),
-        ],
-        'funcname': [
-            (r'[a-zA-Z_]\w*[!?]?', Name.Function),
-            (r'\s+', Whitespace),
-            # argument declaration
-            (r'(\([$@%]*\))(\s*)', bygroups(Punctuation, Whitespace)),
-            (r';', Punctuation, '#pop'),
-            (r'.*?\{', Punctuation, '#pop'),
-        ],
-        'cb-string': [
-            (r'\\[{}\\]', String.Other),
-            (r'\\', String.Other),
-            (r'\{', String.Other, 'cb-string'),
-            (r'\}', String.Other, '#pop'),
-            (r'[^{}\\]+', String.Other)
-        ],
-        'rb-string': [
-            (r'\\[()\\]', String.Other),
-            (r'\\', String.Other),
-            (r'\(', String.Other, 'rb-string'),
-            (r'\)', String.Other, '#pop'),
-            (r'[^()]+', String.Other)
-        ],
-        'sb-string': [
-            (r'\\[\[\]\\]', String.Other),
-            (r'\\', String.Other),
-            (r'\[', String.Other, 'sb-string'),
-            (r'\]', String.Other, '#pop'),
-            (r'[^\[\]]+', String.Other)
-        ],
-        'lt-string': [
-            (r'\\[<>\\]', String.Other),
-            (r'\\', String.Other),
-            (r'\<', String.Other, 'lt-string'),
-            (r'\>', String.Other, '#pop'),
-            (r'[^<>]+', String.Other)
-        ],
-        'end-part': [
-            (r'.+', Comment.Preproc, '#pop')
-        ]
-    }
-
-    def analyse_text(text):
-        if shebang_matches(text, r'perl'):
-            return True
-
-        result = 0
-
-        if re.search(r'(?:my|our)\s+[$@%(]', text):
-            result += 0.9
-
-        if ':=' in text:
-            # := is not valid Perl, but it appears in unicon, so we should
-            # become less confident if we think we found Perl with :=
-            result /= 2
-
-        return result
-
-
-class Perl6Lexer(ExtendedRegexLexer):
-    """
-    For Raku (a.k.a. Perl 6) source code.
-
-    .. versionadded:: 2.0
-    """
-
-    name = 'Perl6'
-    url = 'https://www.raku.org'
-    aliases = ['perl6', 'pl6', 'raku']
-    filenames = ['*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6',
-                 '*.6pm', '*.p6m', '*.pm6', '*.t', '*.raku', '*.rakumod',
-                 '*.rakutest', '*.rakudoc']
-    mimetypes = ['text/x-perl6', 'application/x-perl6']
-    flags = re.MULTILINE | re.DOTALL
-
-    PERL6_IDENTIFIER_RANGE = r"['\w:-]"
-
-    PERL6_KEYWORDS = (
-        #Phasers
-        'BEGIN','CATCH','CHECK','CLOSE','CONTROL','DOC','END','ENTER','FIRST',
-        'INIT','KEEP','LAST','LEAVE','NEXT','POST','PRE','QUIT','UNDO',
-        #Keywords
-        'anon','augment','but','class','constant','default','does','else',
-        'elsif','enum','for','gather','given','grammar','has','if','import',
-        'is','let','loop','made','make','method','module','multi','my','need',
-        'orwith','our','proceed','proto','repeat','require','return',
-        'return-rw','returns','role','rule','state','sub','submethod','subset',
-        'succeed','supersede','token','try','unit','unless','until','use',
-        'when','while','with','without',
-        #Traits
-        'export','native','repr','required','rw','symbol',
-    )
-
-    PERL6_BUILTINS = (
-        'ACCEPTS','abs','abs2rel','absolute','accept','accessed','acos',
-        'acosec','acosech','acosh','acotan','acotanh','acquire','act','action',
-        'actions','add','add_attribute','add_enum_value','add_fallback',
-        'add_method','add_parent','add_private_method','add_role','add_trustee',
-        'adverb','after','all','allocate','allof','allowed','alternative-names',
-        'annotations','antipair','antipairs','any','anyof','app_lifetime',
-        'append','arch','archname','args','arity','Array','asec','asech','asin',
-        'asinh','ASSIGN-KEY','ASSIGN-POS','assuming','ast','at','atan','atan2',
-        'atanh','AT-KEY','atomic-assign','atomic-dec-fetch','atomic-fetch',
-        'atomic-fetch-add','atomic-fetch-dec','atomic-fetch-inc',
-        'atomic-fetch-sub','atomic-inc-fetch','AT-POS','attributes','auth',
-        'await','backtrace','Bag','BagHash','bail-out','base','basename',
-        'base-repeating','batch','BIND-KEY','BIND-POS','bind-stderr',
-        'bind-stdin','bind-stdout','bind-udp','bits','bless','block','Bool',
-        'bool-only','bounds','break','Bridge','broken','BUILD','build-date',
-        'bytes','cache','callframe','calling-package','CALL-ME','callsame',
-        'callwith','can','cancel','candidates','cando','can-ok','canonpath',
-        'caps','caption','Capture','cas','catdir','categorize','categorize-list',
-        'catfile','catpath','cause','ceiling','cglobal','changed','Channel',
-        'chars','chdir','child','child-name','child-typename','chmod','chomp',
-        'chop','chr','chrs','chunks','cis','classify','classify-list','cleanup',
-        'clone','close','closed','close-stdin','cmp-ok','code','codes','collate',
-        'column','comb','combinations','command','comment','compiler','Complex',
-        'compose','compose_type','composer','condition','config',
-        'configure_destroy','configure_type_checking','conj','connect',
-        'constraints','construct','contains','contents','copy','cos','cosec',
-        'cosech','cosh','cotan','cotanh','count','count-only','cpu-cores',
-        'cpu-usage','CREATE','create_type','cross','cue','curdir','curupdir','d',
-        'Date','DateTime','day','daycount','day-of-month','day-of-week',
-        'day-of-year','days-in-month','declaration','decode','decoder','deepmap',
-        'default','defined','DEFINITE','delayed','DELETE-KEY','DELETE-POS',
-        'denominator','desc','DESTROY','destroyers','devnull','diag',
-        'did-you-mean','die','dies-ok','dir','dirname','dir-sep','DISTROnames',
-        'do','does','does-ok','done','done-testing','duckmap','dynamic','e',
-        'eager','earlier','elems','emit','enclosing','encode','encoder',
-        'encoding','end','ends-with','enum_from_value','enum_value_list',
-        'enum_values','enums','eof','EVAL','eval-dies-ok','EVALFILE',
-        'eval-lives-ok','exception','excludes-max','excludes-min','EXISTS-KEY',
-        'EXISTS-POS','exit','exitcode','exp','expected','explicitly-manage',
-        'expmod','extension','f','fail','fails-like','fc','feature','file',
-        'filename','find_method','find_method_qualified','finish','first','flat',
-        'flatmap','flip','floor','flunk','flush','fmt','format','formatter',
-        'freeze','from','from-list','from-loop','from-posix','full',
-        'full-barrier','get','get_value','getc','gist','got','grab','grabpairs',
-        'grep','handle','handled','handles','hardware','has_accessor','Hash',
-        'head','headers','hh-mm-ss','hidden','hides','hour','how','hyper','id',
-        'illegal','im','in','indent','index','indices','indir','infinite',
-        'infix','infix:<+>','infix:<->','install_method_cache','Instant',
-        'instead','Int','int-bounds','interval','in-timezone','invalid-str',
-        'invert','invocant','IO','IO::Notification.watch-path','is_trusted',
-        'is_type','isa','is-absolute','isa-ok','is-approx','is-deeply',
-        'is-hidden','is-initial-thread','is-int','is-lazy','is-leap-year',
-        'isNaN','isnt','is-prime','is-relative','is-routine','is-setting',
-        'is-win','item','iterator','join','keep','kept','KERNELnames','key',
-        'keyof','keys','kill','kv','kxxv','l','lang','last','lastcall','later',
-        'lazy','lc','leading','level','like','line','lines','link','List',
-        'listen','live','lives-ok','local','lock','log','log10','lookup','lsb',
-        'made','MAIN','make','Map','match','max','maxpairs','merge','message',
-        'method','method_table','methods','migrate','min','minmax','minpairs',
-        'minute','misplaced','Mix','MixHash','mkdir','mode','modified','month',
-        'move','mro','msb','multi','multiness','my','name','named','named_names',
-        'narrow','nativecast','native-descriptor','nativesizeof','new','new_type',
-        'new-from-daycount','new-from-pairs','next','nextcallee','next-handle',
-        'nextsame','nextwith','NFC','NFD','NFKC','NFKD','nl-in','nl-out',
-        'nodemap','nok','none','norm','not','note','now','nude','Num',
-        'numerator','Numeric','of','offset','offset-in-hours','offset-in-minutes',
-        'ok','old','on-close','one','on-switch','open','opened','operation',
-        'optional','ord','ords','orig','os-error','osname','out-buffer','pack',
-        'package','package-kind','package-name','packages','pair','pairs',
-        'pairup','parameter','params','parent','parent-name','parents','parse',
-        'parse-base','parsefile','parse-names','parts','pass','path','path-sep',
-        'payload','peer-host','peer-port','periods','perl','permutations','phaser',
-        'pick','pickpairs','pid','placeholder','plan','plus','polar','poll',
-        'polymod','pop','pos','positional','posix','postfix','postmatch',
-        'precomp-ext','precomp-target','pred','prefix','prematch','prepend',
-        'print','printf','print-nl','print-to','private','private_method_table',
-        'proc','produce','Promise','prompt','protect','pull-one','push',
-        'push-all','push-at-least','push-exactly','push-until-lazy','put',
-        'qualifier-type','quit','r','race','radix','rand','range','Rat','raw',
-        're','read','readchars','readonly','ready','Real','reallocate','reals',
-        'reason','rebless','receive','recv','redispatcher','redo','reduce',
-        'rel2abs','relative','release','rename','repeated','replacement',
-        'report','reserved','resolve','restore','result','resume','rethrow',
-        'reverse','right','rindex','rmdir','role','roles_to_compose','rolish',
-        'roll','rootdir','roots','rotate','rotor','round','roundrobin',
-        'routine-type','run','rwx','s','samecase','samemark','samewith','say',
-        'schedule-on','scheduler','scope','sec','sech','second','seek','self',
-        'send','Set','set_hidden','set_name','set_package','set_rw','set_value',
-        'SetHash','set-instruments','setup_finalization','shape','share','shell',
-        'shift','sibling','sigil','sign','signal','signals','signature','sin',
-        'sinh','sink','sink-all','skip','skip-at-least','skip-at-least-pull-one',
-        'skip-one','skip-rest','sleep','sleep-timer','sleep-until','Slip','slurp',
-        'slurp-rest','slurpy','snap','snapper','so','socket-host','socket-port',
-        'sort','source','source-package','spawn','SPEC','splice','split',
-        'splitdir','splitpath','sprintf','spurt','sqrt','squish','srand','stable',
-        'start','started','starts-with','status','stderr','stdout','Str',
-        'sub_signature','subbuf','subbuf-rw','subname','subparse','subst',
-        'subst-mutate','substr','substr-eq','substr-rw','subtest','succ','sum',
-        'Supply','symlink','t','tail','take','take-rw','tan','tanh','tap',
-        'target','target-name','tc','tclc','tell','then','throttle','throw',
-        'throws-like','timezone','tmpdir','to','today','todo','toggle','to-posix',
-        'total','trailing','trans','tree','trim','trim-leading','trim-trailing',
-        'truncate','truncated-to','trusts','try_acquire','trying','twigil','type',
-        'type_captures','typename','uc','udp','uncaught_handler','unimatch',
-        'uniname','uninames','uniparse','uniprop','uniprops','unique','unival',
-        'univals','unlike','unlink','unlock','unpack','unpolar','unshift',
-        'unwrap','updir','USAGE','use-ok','utc','val','value','values','VAR',
-        'variable','verbose-config','version','VMnames','volume','vow','w','wait',
-        'warn','watch','watch-path','week','weekday-of-month','week-number',
-        'week-year','WHAT','when','WHERE','WHEREFORE','WHICH','WHO',
-        'whole-second','WHY','wordcase','words','workaround','wrap','write',
-        'write-to','x','yada','year','yield','yyyy-mm-dd','z','zip','zip-latest',
-
-    )
-
-    PERL6_BUILTIN_CLASSES = (
-        #Booleans
-        'False','True',
-        #Classes
-        'Any','Array','Associative','AST','atomicint','Attribute','Backtrace',
-        'Backtrace::Frame','Bag','Baggy','BagHash','Blob','Block','Bool','Buf',
-        'Callable','CallFrame','Cancellation','Capture','CArray','Channel','Code',
-        'compiler','Complex','ComplexStr','Cool','CurrentThreadScheduler',
-        'Cursor','Date','Dateish','DateTime','Distro','Duration','Encoding',
-        'Exception','Failure','FatRat','Grammar','Hash','HyperWhatever','Instant',
-        'Int','int16','int32','int64','int8','IntStr','IO','IO::ArgFiles',
-        'IO::CatHandle','IO::Handle','IO::Notification','IO::Path',
-        'IO::Path::Cygwin','IO::Path::QNX','IO::Path::Unix','IO::Path::Win32',
-        'IO::Pipe','IO::Socket','IO::Socket::Async','IO::Socket::INET','IO::Spec',
-        'IO::Spec::Cygwin','IO::Spec::QNX','IO::Spec::Unix','IO::Spec::Win32',
-        'IO::Special','Iterable','Iterator','Junction','Kernel','Label','List',
-        'Lock','Lock::Async','long','longlong','Macro','Map','Match',
-        'Metamodel::AttributeContainer','Metamodel::C3MRO','Metamodel::ClassHOW',
-        'Metamodel::EnumHOW','Metamodel::Finalization','Metamodel::MethodContainer',
-        'Metamodel::MROBasedMethodDispatch','Metamodel::MultipleInheritance',
-        'Metamodel::Naming','Metamodel::Primitives','Metamodel::PrivateMethodContainer',
-        'Metamodel::RoleContainer','Metamodel::Trusting','Method','Mix','MixHash',
-        'Mixy','Mu','NFC','NFD','NFKC','NFKD','Nil','Num','num32','num64',
-        'Numeric','NumStr','ObjAt','Order','Pair','Parameter','Perl','Pod::Block',
-        'Pod::Block::Code','Pod::Block::Comment','Pod::Block::Declarator',
-        'Pod::Block::Named','Pod::Block::Para','Pod::Block::Table','Pod::Heading',
-        'Pod::Item','Pointer','Positional','PositionalBindFailover','Proc',
-        'Proc::Async','Promise','Proxy','PseudoStash','QuantHash','Range','Rat',
-        'Rational','RatStr','Real','Regex','Routine','Scalar','Scheduler',
-        'Semaphore','Seq','Set','SetHash','Setty','Signature','size_t','Slip',
-        'Stash','Str','StrDistance','Stringy','Sub','Submethod','Supplier',
-        'Supplier::Preserving','Supply','Systemic','Tap','Telemetry',
-        'Telemetry::Instrument::Thread','Telemetry::Instrument::Usage',
-        'Telemetry::Period','Telemetry::Sampler','Thread','ThreadPoolScheduler',
-        'UInt','uint16','uint32','uint64','uint8','Uni','utf8','Variable',
-        'Version','VM','Whatever','WhateverCode','WrapHandle'
-    )
-
-    PERL6_OPERATORS = (
-        'X', 'Z', 'after', 'also', 'and', 'andthen', 'before', 'cmp', 'div',
-        'eq', 'eqv', 'extra', 'ff', 'fff', 'ge', 'gt', 'le', 'leg', 'lt', 'm',
-        'mm', 'mod', 'ne', 'or', 'orelse', 'rx', 's', 'tr', 'x', 'xor', 'xx',
-        '++', '--', '**', '!', '+', '-', '~', '?', '|', '||', '+^', '~^', '?^',
-        '^', '*', '/', '%', '%%', '+&', '+<', '+>', '~&', '~<', '~>', '?&',
-        'gcd', 'lcm', '+', '-', '+|', '+^', '~|', '~^', '?|', '?^',
-        '~', '&', '^', 'but', 'does', '<=>', '..', '..^', '^..', '^..^',
-        '!=', '==', '<', '<=', '>', '>=', '~~', '===', '!eqv',
-        '&&', '||', '^^', '//', 'min', 'max', '??', '!!', 'ff', 'fff', 'so',
-        'not', '<==', '==>', '<<==', '==>>','unicmp',
-    )
-
-    # Perl 6 has a *lot* of possible bracketing characters
-    # this list was lifted from STD.pm6 (https://github.com/perl6/std)
-    PERL6_BRACKETS = {
-        '\u0028': '\u0029', '\u003c': '\u003e', '\u005b': '\u005d',
-        '\u007b': '\u007d', '\u00ab': '\u00bb', '\u0f3a': '\u0f3b',
-        '\u0f3c': '\u0f3d', '\u169b': '\u169c', '\u2018': '\u2019',
-        '\u201a': '\u2019', '\u201b': '\u2019', '\u201c': '\u201d',
-        '\u201e': '\u201d', '\u201f': '\u201d', '\u2039': '\u203a',
-        '\u2045': '\u2046', '\u207d': '\u207e', '\u208d': '\u208e',
-        '\u2208': '\u220b', '\u2209': '\u220c', '\u220a': '\u220d',
-        '\u2215': '\u29f5', '\u223c': '\u223d', '\u2243': '\u22cd',
-        '\u2252': '\u2253', '\u2254': '\u2255', '\u2264': '\u2265',
-        '\u2266': '\u2267', '\u2268': '\u2269', '\u226a': '\u226b',
-        '\u226e': '\u226f', '\u2270': '\u2271', '\u2272': '\u2273',
-        '\u2274': '\u2275', '\u2276': '\u2277', '\u2278': '\u2279',
-        '\u227a': '\u227b', '\u227c': '\u227d', '\u227e': '\u227f',
-        '\u2280': '\u2281', '\u2282': '\u2283', '\u2284': '\u2285',
-        '\u2286': '\u2287', '\u2288': '\u2289', '\u228a': '\u228b',
-        '\u228f': '\u2290', '\u2291': '\u2292', '\u2298': '\u29b8',
-        '\u22a2': '\u22a3', '\u22a6': '\u2ade', '\u22a8': '\u2ae4',
-        '\u22a9': '\u2ae3', '\u22ab': '\u2ae5', '\u22b0': '\u22b1',
-        '\u22b2': '\u22b3', '\u22b4': '\u22b5', '\u22b6': '\u22b7',
-        '\u22c9': '\u22ca', '\u22cb': '\u22cc', '\u22d0': '\u22d1',
-        '\u22d6': '\u22d7', '\u22d8': '\u22d9', '\u22da': '\u22db',
-        '\u22dc': '\u22dd', '\u22de': '\u22df', '\u22e0': '\u22e1',
-        '\u22e2': '\u22e3', '\u22e4': '\u22e5', '\u22e6': '\u22e7',
-        '\u22e8': '\u22e9', '\u22ea': '\u22eb', '\u22ec': '\u22ed',
-        '\u22f0': '\u22f1', '\u22f2': '\u22fa', '\u22f3': '\u22fb',
-        '\u22f4': '\u22fc', '\u22f6': '\u22fd', '\u22f7': '\u22fe',
-        '\u2308': '\u2309', '\u230a': '\u230b', '\u2329': '\u232a',
-        '\u23b4': '\u23b5', '\u2768': '\u2769', '\u276a': '\u276b',
-        '\u276c': '\u276d', '\u276e': '\u276f', '\u2770': '\u2771',
-        '\u2772': '\u2773', '\u2774': '\u2775', '\u27c3': '\u27c4',
-        '\u27c5': '\u27c6', '\u27d5': '\u27d6', '\u27dd': '\u27de',
-        '\u27e2': '\u27e3', '\u27e4': '\u27e5', '\u27e6': '\u27e7',
-        '\u27e8': '\u27e9', '\u27ea': '\u27eb', '\u2983': '\u2984',
-        '\u2985': '\u2986', '\u2987': '\u2988', '\u2989': '\u298a',
-        '\u298b': '\u298c', '\u298d': '\u298e', '\u298f': '\u2990',
-        '\u2991': '\u2992', '\u2993': '\u2994', '\u2995': '\u2996',
-        '\u2997': '\u2998', '\u29c0': '\u29c1', '\u29c4': '\u29c5',
-        '\u29cf': '\u29d0', '\u29d1': '\u29d2', '\u29d4': '\u29d5',
-        '\u29d8': '\u29d9', '\u29da': '\u29db', '\u29f8': '\u29f9',
-        '\u29fc': '\u29fd', '\u2a2b': '\u2a2c', '\u2a2d': '\u2a2e',
-        '\u2a34': '\u2a35', '\u2a3c': '\u2a3d', '\u2a64': '\u2a65',
-        '\u2a79': '\u2a7a', '\u2a7d': '\u2a7e', '\u2a7f': '\u2a80',
-        '\u2a81': '\u2a82', '\u2a83': '\u2a84', '\u2a8b': '\u2a8c',
-        '\u2a91': '\u2a92', '\u2a93': '\u2a94', '\u2a95': '\u2a96',
-        '\u2a97': '\u2a98', '\u2a99': '\u2a9a', '\u2a9b': '\u2a9c',
-        '\u2aa1': '\u2aa2', '\u2aa6': '\u2aa7', '\u2aa8': '\u2aa9',
-        '\u2aaa': '\u2aab', '\u2aac': '\u2aad', '\u2aaf': '\u2ab0',
-        '\u2ab3': '\u2ab4', '\u2abb': '\u2abc', '\u2abd': '\u2abe',
-        '\u2abf': '\u2ac0', '\u2ac1': '\u2ac2', '\u2ac3': '\u2ac4',
-        '\u2ac5': '\u2ac6', '\u2acd': '\u2ace', '\u2acf': '\u2ad0',
-        '\u2ad1': '\u2ad2', '\u2ad3': '\u2ad4', '\u2ad5': '\u2ad6',
-        '\u2aec': '\u2aed', '\u2af7': '\u2af8', '\u2af9': '\u2afa',
-        '\u2e02': '\u2e03', '\u2e04': '\u2e05', '\u2e09': '\u2e0a',
-        '\u2e0c': '\u2e0d', '\u2e1c': '\u2e1d', '\u2e20': '\u2e21',
-        '\u3008': '\u3009', '\u300a': '\u300b', '\u300c': '\u300d',
-        '\u300e': '\u300f', '\u3010': '\u3011', '\u3014': '\u3015',
-        '\u3016': '\u3017', '\u3018': '\u3019', '\u301a': '\u301b',
-        '\u301d': '\u301e', '\ufd3e': '\ufd3f', '\ufe17': '\ufe18',
-        '\ufe35': '\ufe36', '\ufe37': '\ufe38', '\ufe39': '\ufe3a',
-        '\ufe3b': '\ufe3c', '\ufe3d': '\ufe3e', '\ufe3f': '\ufe40',
-        '\ufe41': '\ufe42', '\ufe43': '\ufe44', '\ufe47': '\ufe48',
-        '\ufe59': '\ufe5a', '\ufe5b': '\ufe5c', '\ufe5d': '\ufe5e',
-        '\uff08': '\uff09', '\uff1c': '\uff1e', '\uff3b': '\uff3d',
-        '\uff5b': '\uff5d', '\uff5f': '\uff60', '\uff62': '\uff63',
-    }
-
-    def _build_word_match(words, boundary_regex_fragment=None, prefix='', suffix=''):
-        if boundary_regex_fragment is None:
-            return r'\b(' + prefix + r'|'.join(re.escape(x) for x in words) + \
-                suffix + r')\b'
-        else:
-            return r'(? 0:
-                    next_open_pos = text.find(opening_chars, search_pos + n_chars)
-                    next_close_pos = text.find(closing_chars, search_pos + n_chars)
-
-                    if next_close_pos == -1:
-                        next_close_pos = len(text)
-                        nesting_level = 0
-                    elif next_open_pos != -1 and next_open_pos < next_close_pos:
-                        nesting_level += 1
-                        search_pos = next_open_pos
-                    else:  # next_close_pos < next_open_pos
-                        nesting_level -= 1
-                        search_pos = next_close_pos
-
-                end_pos = next_close_pos
-
-            if end_pos < 0:     # if we didn't find a closer, just highlight the
-                                # rest of the text in this class
-                end_pos = len(text)
-
-            if adverbs is not None and re.search(r':to\b', adverbs):
-                heredoc_terminator = text[match.start('delimiter') + n_chars:end_pos]
-                end_heredoc = re.search(r'^\s*' + re.escape(heredoc_terminator) +
-                                        r'\s*$', text[end_pos:], re.MULTILINE)
-
-                if end_heredoc:
-                    end_pos += end_heredoc.end()
-                else:
-                    end_pos = len(text)
-
-            yield match.start(), token_class, text[match.start():end_pos + n_chars]
-            context.pos = end_pos + n_chars
-
-        return callback
-
-    def opening_brace_callback(lexer, match, context):
-        stack = context.stack
-
-        yield match.start(), Text, context.text[match.start():match.end()]
-        context.pos = match.end()
-
-        # if we encounter an opening brace and we're one level
-        # below a token state, it means we need to increment
-        # the nesting level for braces so we know later when
-        # we should return to the token rules.
-        if len(stack) > 2 and stack[-2] == 'token':
-            context.perl6_token_nesting_level += 1
-
-    def closing_brace_callback(lexer, match, context):
-        stack = context.stack
-
-        yield match.start(), Text, context.text[match.start():match.end()]
-        context.pos = match.end()
-
-        # if we encounter a free closing brace and we're one level
-        # below a token state, it means we need to check the nesting
-        # level to see if we need to return to the token state.
-        if len(stack) > 2 and stack[-2] == 'token':
-            context.perl6_token_nesting_level -= 1
-            if context.perl6_token_nesting_level == 0:
-                stack.pop()
-
-    def embedded_perl6_callback(lexer, match, context):
-        context.perl6_token_nesting_level = 1
-        yield match.start(), Text, context.text[match.start():match.end()]
-        context.pos = match.end()
-        context.stack.append('root')
-
-    # If you're modifying these rules, be careful if you need to process '{' or '}'
-    # characters. We have special logic for processing these characters (due to the fact
-    # that you can nest Perl 6 code in regex blocks), so if you need to process one of
-    # them, make sure you also process the corresponding one!
-    tokens = {
-        'common': [
-            (r'#[`|=](?P(?P[' + ''.join(PERL6_BRACKETS) + r'])(?P=first_char)*)',
-             brackets_callback(Comment.Multiline)),
-            (r'#[^\n]*$', Comment.Single),
-            (r'^(\s*)=begin\s+(\w+)\b.*?^\1=end\s+\2', Comment.Multiline),
-            (r'^(\s*)=for.*?\n\s*?\n', Comment.Multiline),
-            (r'^=.*?\n\s*?\n', Comment.Multiline),
-            (r'(regex|token|rule)(\s*' + PERL6_IDENTIFIER_RANGE + '+:sym)',
-             bygroups(Keyword, Name), 'token-sym-brackets'),
-            (r'(regex|token|rule)(?!' + PERL6_IDENTIFIER_RANGE + r')(\s*' + PERL6_IDENTIFIER_RANGE + '+)?',
-             bygroups(Keyword, Name), 'pre-token'),
-            # deal with a special case in the Perl 6 grammar (role q { ... })
-            (r'(role)(\s+)(q)(\s*)', bygroups(Keyword, Whitespace, Name, Whitespace)),
-            (_build_word_match(PERL6_KEYWORDS, PERL6_IDENTIFIER_RANGE), Keyword),
-            (_build_word_match(PERL6_BUILTIN_CLASSES, PERL6_IDENTIFIER_RANGE, suffix='(?::[UD])?'),
-             Name.Builtin),
-            (_build_word_match(PERL6_BUILTINS, PERL6_IDENTIFIER_RANGE), Name.Builtin),
-            # copied from PerlLexer
-            (r'[$@%&][.^:?=!~]?' + PERL6_IDENTIFIER_RANGE + '+(?:<<.*?>>|<.*?>|«.*?»)*',
-             Name.Variable),
-            (r'\$[!/](?:<<.*?>>|<.*?>|«.*?»)*', Name.Variable.Global),
-            (r'::\?\w+', Name.Variable.Global),
-            (r'[$@%&]\*' + PERL6_IDENTIFIER_RANGE + '+(?:<<.*?>>|<.*?>|«.*?»)*',
-             Name.Variable.Global),
-            (r'\$(?:<.*?>)+', Name.Variable),
-            (r'(?:q|qq|Q)[a-zA-Z]?\s*(?P:[\w\s:]+)?\s*(?P(?P[^0-9a-zA-Z:\s])'
-             r'(?P=first_char)*)', brackets_callback(String)),
-            # copied from PerlLexer
-            (r'0_?[0-7]+(_[0-7]+)*', Number.Oct),
-            (r'0x[0-9A-Fa-f]+(_[0-9A-Fa-f]+)*', Number.Hex),
-            (r'0b[01]+(_[01]+)*', Number.Bin),
-            (r'(?i)(\d*(_\d*)*\.\d+(_\d*)*|\d+(_\d*)*\.\d+(_\d*)*)(e[+-]?\d+)?',
-             Number.Float),
-            (r'(?i)\d+(_\d*)*e[+-]?\d+(_\d*)*', Number.Float),
-            (r'\d+(_\d+)*', Number.Integer),
-            (r'(?<=~~)\s*/(?:\\\\|\\/|.)*?/', String.Regex),
-            (r'(?<=[=(,])\s*/(?:\\\\|\\/|.)*?/', String.Regex),
-            (r'm\w+(?=\()', Name),
-            (r'(?:m|ms|rx)\s*(?P:[\w\s:]+)?\s*(?P(?P[^\w:\s])'
-             r'(?P=first_char)*)', brackets_callback(String.Regex)),
-            (r'(?:s|ss|tr)\s*(?::[\w\s:]+)?\s*/(?:\\\\|\\/|.)*?/(?:\\\\|\\/|.)*?/',
-             String.Regex),
-            (r'<[^\s=].*?\S>', String),
-            (_build_word_match(PERL6_OPERATORS), Operator),
-            (r'\w' + PERL6_IDENTIFIER_RANGE + '*', Name),
-            (r"'(\\\\|\\[^\\]|[^'\\])*'", String),
-            (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
-        ],
-        'root': [
-            include('common'),
-            (r'\{', opening_brace_callback),
-            (r'\}', closing_brace_callback),
-            (r'.+?', Text),
-        ],
-        'pre-token': [
-            include('common'),
-            (r'\{', Text, ('#pop', 'token')),
-            (r'.+?', Text),
-        ],
-        'token-sym-brackets': [
-            (r'(?P(?P[' + ''.join(PERL6_BRACKETS) + '])(?P=first_char)*)',
-             brackets_callback(Name), ('#pop', 'pre-token')),
-            default(('#pop', 'pre-token')),
-        ],
-        'token': [
-            (r'\}', Text, '#pop'),
-            (r'(?<=:)(?:my|our|state|constant|temp|let).*?;', using(this)),
-            # make sure that quotes in character classes aren't treated as strings
-            (r'<(?:[-!?+.]\s*)?\[.*?\]>', String.Regex),
-            # make sure that '#' characters in quotes aren't treated as comments
-            (r"(?my|our)\s+)?(?:module|class|role|enum|grammar)', line)
-            if class_decl:
-                if saw_perl_decl or class_decl.group('scope') is not None:
-                    return True
-                rating = 0.05
-                continue
-            break
-
-        if ':=' in text:
-            # Same logic as above for PerlLexer
-            rating /= 2
-
-        return rating
-
-    def __init__(self, **options):
-        super().__init__(**options)
-        self.encoding = options.get('encoding', 'utf-8')
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/phix.py b/venv/lib/python3.11/site-packages/pygments/lexers/phix.py
deleted file mode 100644
index fb08b1d..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/phix.py
+++ /dev/null
@@ -1,364 +0,0 @@
-"""
-    pygments.lexers.phix
-    ~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for Phix.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Whitespace
-
-__all__ = ['PhixLexer']
-
-
-class PhixLexer(RegexLexer):
-    """
-    Pygments Lexer for Phix files (.exw).
-    See http://phix.x10.mx
-
-    .. versionadded:: 2.14.0
-    """
-
-    name = 'Phix'
-    url = 'http://phix.x10.mx'
-    aliases = ['phix']
-    filenames = ['*.exw']
-    mimetypes = ['text/x-phix']
-
-    flags = re.MULTILINE    # nb: **NOT** re.DOTALL! (totally spanners comment handling)
-
-    preproc = (
-        'ifdef', 'elsifdef', 'elsedef'
-    )
-    # Note these lists are auto-generated by pwa/p2js.exw, when pwa\src\p2js_keywords.e (etc)
-    #     change, though of course subsequent copy/commit/pull requests are all manual steps.
-    types = (
-        'string', 'nullable_string', 'atom_string', 'atom', 'bool', 'boolean',
-        'cdCanvan', 'cdCanvas', 'complex', 'CURLcode', 'dictionary', 'int',
-        'integer', 'Ihandle', 'Ihandles', 'Ihandln', 'mpfr', 'mpq', 'mpz',
-        'mpz_or_string', 'number', 'rid_string', 'seq', 'sequence', 'timedate',
-        'object'
-    )
-    keywords = (
-        'abstract', 'class', 'continue', 'export', 'extends', 'nullable',
-        'private', 'public', 'static', 'struct', 'trace',
-        'and', 'break', 'by', 'case', 'catch', 'const', 'constant', 'debug',
-        'default', 'do', 'else', 'elsif', 'end', 'enum', 'exit', 'fallthru',
-        'fallthrough', 'for', 'forward', 'function', 'global', 'if', 'in',
-        'include', 'js', 'javascript', 'javascript_semantics', 'let', 'not',
-        'or', 'procedure', 'profile', 'profile_time', 'return', 'safe_mode',
-        'switch', 'then', 'to', 'try', 'type', 'type_check', 'until', 'warning',
-        'while', 'with', 'without', 'xor'
-    )
-    routines = (
-        'abort', 'abs', 'adjust_timedate', 'and_bits', 'and_bitsu', 'apply',
-        'append', 'arccos', 'arcsin', 'arctan', 'assert', 'atan2',
-        'atom_to_float32', 'atom_to_float64', 'bankers_rounding', 'beep',
-        'begins', 'binary_search', 'bits_to_int', 'bk_color', 'bytes_to_int',
-        'call_func', 'call_proc', 'cdCanvasActivate', 'cdCanvasArc',
-        'cdCanvasBegin', 'cdCanvasBox', 'cdCanvasChord', 'cdCanvasCircle',
-        'cdCanvasClear', 'cdCanvasEnd', 'cdCanvasFlush', 'cdCanvasFont',
-        'cdCanvasGetImageRGB', 'cdCanvasGetSize', 'cdCanvasGetTextAlignment',
-        'cdCanvasGetTextSize', 'cdCanvasLine', 'cdCanvasMark',
-        'cdCanvasMarkSize', 'cdCanvasMultiLineVectorText', 'cdCanvasPixel',
-        'cdCanvasRect', 'cdCanvasRoundedBox', 'cdCanvasRoundedRect',
-        'cdCanvasSector', 'cdCanvasSetAttribute', 'cdCanvasSetBackground',
-        'cdCanvasSetFillMode', 'cdCanvasSetForeground',
-        'cdCanvasSetInteriorStyle', 'cdCanvasSetLineStyle',
-        'cdCanvasSetLineWidth', 'cdCanvasSetTextAlignment', 'cdCanvasText',
-        'cdCanvasSetTextOrientation', 'cdCanvasGetTextOrientation',
-        'cdCanvasVectorText', 'cdCanvasVectorTextDirection',
-        'cdCanvasVectorTextSize', 'cdCanvasVertex', 'cdCreateCanvas',
-        'cdDecodeAlpha', 'cdDecodeColor', 'cdDecodeColorAlpha', 'cdEncodeAlpha',
-        'cdEncodeColor', 'cdEncodeColorAlpha', 'cdKillCanvas', 'cdVersion',
-        'cdVersionDate', 'ceil', 'change_timezone', 'choose', 'clear_screen',
-        'columnize', 'command_line', 'compare', 'complex_abs', 'complex_add',
-        'complex_arg', 'complex_conjugate', 'complex_cos', 'complex_cosh',
-        'complex_div', 'complex_exp', 'complex_imag', 'complex_inv',
-        'complex_log', 'complex_mul', 'complex_neg', 'complex_new',
-        'complex_norm', 'complex_power', 'complex_rho', 'complex_real',
-        'complex_round', 'complex_sin', 'complex_sinh', 'complex_sprint',
-        'complex_sqrt', 'complex_sub', 'complex_theta', 'concat', 'cos',
-        'crash', 'custom_sort', 'date', 'day_of_week', 'day_of_year',
-        'days_in_month', 'decode_base64', 'decode_flags', 'deep_copy', 'deld',
-        'deserialize', 'destroy_dict', 'destroy_queue', 'destroy_stack',
-        'dict_name', 'dict_size', 'elapsed', 'elapsed_short', 'encode_base64',
-        'equal', 'even', 'exp', 'extract', 'factorial', 'factors',
-        'file_size_k', 'find', 'find_all', 'find_any', 'find_replace', 'filter',
-        'flatten', 'float32_to_atom', 'float64_to_atom', 'floor',
-        'format_timedate', 'free_console', 'from_polar', 'gcd', 'get_file_base',
-        'get_file_extension', 'get_file_name', 'get_file_name_and_path',
-        'get_file_path', 'get_file_path_and_name', 'get_maxprime', 'get_prime',
-        'get_primes', 'get_primes_le', 'get_proper_dir', 'get_proper_path',
-        'get_rand', 'get_routine_info', 'get_test_abort', 'get_test_logfile',
-        'get_test_pause', 'get_test_verbosity', 'get_tzid', 'getd', 'getdd',
-        'getd_all_keys', 'getd_by_index', 'getd_index', 'getd_partial_key',
-        'glAttachShader', 'glBindBuffer', 'glBindTexture', 'glBufferData',
-        'glCanvasSpecialText', 'glClear', 'glClearColor', 'glColor',
-        'glCompileShader', 'glCreateBuffer', 'glCreateProgram',
-        'glCreateShader', 'glCreateTexture', 'glDeleteProgram',
-        'glDeleteShader', 'glDrawArrays', 'glEnable',
-        'glEnableVertexAttribArray', 'glFloat32Array', 'glInt32Array',
-        'glFlush', 'glGetAttribLocation', 'glGetError', 'glGetProgramInfoLog',
-        'glGetProgramParameter', 'glGetShaderInfoLog', 'glGetShaderParameter',
-        'glGetUniformLocation', 'glLinkProgram', 'glLoadIdentity',
-        'glMatrixMode', 'glOrtho', 'glRotatef', 'glShadeModel',
-        'glShaderSource', 'glSimpleA7texcoords', 'glTexImage2Dc',
-        'glTexParameteri', 'glTranslate', 'glUniform1f', 'glUniform1i',
-        'glUniformMatrix4fv', 'glUseProgram', 'glVertex',
-        'glVertexAttribPointer', 'glViewport', 'head', 'hsv_to_rgb', 'iff',
-        'iif', 'include_file', 'incl0de_file', 'insert', 'instance',
-        'int_to_bits', 'int_to_bytes', 'is_dict', 'is_integer', 's_leap_year',
-        'is_prime', 'is_prime2', 'islower', 'isupper', 'Icallback',
-        'iup_isdouble', 'iup_isprint', 'iup_XkeyBase', 'IupAppend', 'IupAlarm',
-        'IupBackgroundBox', 'IupButton', 'IupCalendar', 'IupCanvas',
-        'IupClipboard', 'IupClose', 'IupCloseOnEscape', 'IupControlsOpen',
-        'IupDatePick', 'IupDestroy', 'IupDialog', 'IupDrawArc', 'IupDrawBegin',
-        'IupDrawEnd', 'IupDrawGetSize', 'IupDrawGetTextSize', 'IupDrawLine',
-        'IupDrawRectangle', 'IupDrawText', 'IupExpander', 'IupFill',
-        'IupFlatLabel', 'IupFlatList', 'IupFlatTree', 'IupFlush', 'IupFrame',
-        'IupGetAttribute', 'IupGetAttributeId', 'IupGetAttributePtr',
-        'IupGetBrother', 'IupGetChild', 'IupGetChildCount', 'IupGetClassName',
-        'IupGetDialog', 'IupGetDialogChild', 'IupGetDouble', 'IupGetFocus',
-        'IupGetGlobal', 'IupGetGlobalInt', 'IupGetGlobalIntInt', 'IupGetInt',
-        'IupGetInt2', 'IupGetIntId', 'IupGetIntInt', 'IupGetParent',
-        'IupGLCanvas', 'IupGLCanvasOpen', 'IupGLMakeCurrent', 'IupGraph',
-        'IupHbox', 'IupHide', 'IupImage', 'IupImageRGBA', 'IupItem',
-        'iupKeyCodeToName', 'IupLabel', 'IupLink', 'IupList', 'IupMap',
-        'IupMenu', 'IupMenuItem', 'IupMessage', 'IupMessageDlg', 'IupMultiBox',
-        'IupMultiLine', 'IupNextField', 'IupNormaliser', 'IupOpen',
-        'IupPlayInput', 'IupPopup', 'IupPreviousField', 'IupProgressBar',
-        'IupRadio', 'IupRecordInput', 'IupRedraw', 'IupRefresh',
-        'IupRefreshChildren', 'IupSeparator', 'IupSetAttribute',
-        'IupSetAttributes', 'IupSetAttributeHandle', 'IupSetAttributeId',
-        'IupSetAttributePtr', 'IupSetCallback', 'IupSetCallbacks',
-        'IupSetDouble', 'IupSetFocus', 'IupSetGlobal', 'IupSetGlobalInt',
-        'IupSetGlobalFunction', 'IupSetHandle', 'IupSetInt',
-        'IupSetStrAttribute', 'IupSetStrGlobal', 'IupShow', 'IupShowXY',
-        'IupSplit', 'IupStoreAttribute', 'IupSubmenu', 'IupTable',
-        'IupTableClearSelected', 'IupTableClick_cb', 'IupTableGetSelected',
-        'IupTableResize_cb', 'IupTableSetData', 'IupTabs', 'IupText',
-        'IupTimer', 'IupToggle', 'IupTreeAddNodes', 'IupTreeView', 'IupUpdate',
-        'IupValuator', 'IupVbox', 'join', 'join_by', 'join_path', 'k_perm',
-        'largest', 'lcm', 'length', 'log', 'log10', 'log2', 'lower',
-        'm4_crossProduct', 'm4_inverse', 'm4_lookAt', 'm4_multiply',
-        'm4_normalize', 'm4_perspective', 'm4_subtractVectors', 'm4_xRotate',
-        'm4_yRotate', 'machine_bits', 'machine_word', 'match', 'match_all',
-        'match_replace', 'max', 'maxsq', 'min', 'minsq', 'mod', 'mpfr_add',
-        'mpfr_ceil', 'mpfr_cmp', 'mpfr_cmp_si', 'mpfr_const_pi', 'mpfr_div',
-        'mpfr_div_si', 'mpfr_div_z', 'mpfr_floor', 'mpfr_free', 'mpfr_get_d',
-        'mpfr_get_default_precision', 'mpfr_get_default_rounding_mode',
-        'mpfr_get_fixed', 'mpfr_get_precision', 'mpfr_get_si', 'mpfr_init',
-        'mpfr_inits', 'mpfr_init_set', 'mpfr_init_set_q', 'mpfr_init_set_z',
-        'mpfr_mul', 'mpfr_mul_si', 'mpfr_pow_si', 'mpfr_set', 'mpfr_set_d',
-        'mpfr_set_default_precision', 'mpfr_set_default_rounding_mode',
-        'mpfr_set_precision', 'mpfr_set_q', 'mpfr_set_si', 'mpfr_set_str',
-        'mpfr_set_z', 'mpfr_si_div', 'mpfr_si_sub', 'mpfr_sqrt', 'mpfr_sub',
-        'mpfr_sub_si', 'mpq_abs', 'mpq_add', 'mpq_add_si', 'mpq_canonicalize',
-        'mpq_cmp', 'mpq_cmp_si', 'mpq_div', 'mpq_div_2exp', 'mpq_free',
-        'mpq_get_den', 'mpq_get_num', 'mpq_get_str', 'mpq_init', 'mpq_init_set',
-        'mpq_init_set_si', 'mpq_init_set_str', 'mpq_init_set_z', 'mpq_inits',
-        'mpq_inv', 'mpq_mul', 'mpq_neg', 'mpq_set', 'mpq_set_si', 'mpq_set_str',
-        'mpq_set_z', 'mpq_sub', 'mpz_abs', 'mpz_add', 'mpz_addmul',
-        'mpz_addmul_ui', 'mpz_addmul_si', 'mpz_add_si', 'mpz_add_ui', 'mpz_and',
-        'mpz_bin_uiui', 'mpz_cdiv_q', 'mpz_cmp', 'mpz_cmp_si', 'mpz_divexact',
-        'mpz_divexact_ui', 'mpz_divisible_p', 'mpz_divisible_ui_p', 'mpz_even',
-        'mpz_fac_ui', 'mpz_factorstring', 'mpz_fdiv_q', 'mpz_fdiv_q_2exp',
-        'mpz_fdiv_q_ui', 'mpz_fdiv_qr', 'mpz_fdiv_r', 'mpz_fdiv_ui',
-        'mpz_fib_ui', 'mpz_fib2_ui', 'mpz_fits_atom', 'mpz_fits_integer',
-        'mpz_free', 'mpz_gcd', 'mpz_gcd_ui', 'mpz_get_atom', 'mpz_get_integer',
-        'mpz_get_short_str', 'mpz_get_str', 'mpz_init', 'mpz_init_set',
-        'mpz_inits', 'mpz_invert', 'mpz_lcm', 'mpz_lcm_ui', 'mpz_max',
-        'mpz_min', 'mpz_mod', 'mpz_mod_ui', 'mpz_mul', 'mpz_mul_2exp',
-        'mpz_mul_d', 'mpz_mul_si', 'mpz_neg', 'mpz_nthroot', 'mpz_odd',
-        'mpz_pollard_rho', 'mpz_pow_ui', 'mpz_powm', 'mpz_powm_ui', 'mpz_prime',
-        'mpz_prime_factors', 'mpz_prime_mr', 'mpz_rand', 'mpz_rand_ui',
-        'mpz_re_compose', 'mpz_remove', 'mpz_scan0', 'mpz_scan1', 'mpz_set',
-        'mpz_set_d', 'mpz_set_si', 'mpz_set_str', 'mpz_set_v', 'mpz_sign',
-        'mpz_sizeinbase', 'mpz_sqrt', 'mpz_sub', 'mpz_sub_si', 'mpz_sub_ui',
-        'mpz_si_sub', 'mpz_tdiv_q_2exp', 'mpz_tdiv_r_2exp', 'mpz_tstbit',
-        'mpz_ui_pow_ui', 'mpz_xor', 'named_dict', 'new_dict', 'new_queue',
-        'new_stack', 'not_bits', 'not_bitsu', 'odd', 'or_all', 'or_allu',
-        'or_bits', 'or_bitsu', 'ord', 'ordinal', 'ordinant',
-        'override_timezone', 'pad', 'pad_head', 'pad_tail', 'parse_date_string',
-        'papply', 'peep', 'peepn', 'peep_dict', 'permute', 'permutes',
-        'platform', 'pop', 'popn', 'pop_dict', 'power', 'pp', 'ppEx', 'ppExf',
-        'ppf', 'ppOpt', 'pq_add', 'pq_destroy', 'pq_empty', 'pq_new', 'pq_peek',
-        'pq_pop', 'pq_pop_data', 'pq_size', 'prepend', 'prime_factors',
-        'printf', 'product', 'proper', 'push', 'pushn', 'putd', 'puts',
-        'queue_empty', 'queue_size', 'rand', 'rand_range', 'reinstate',
-        'remainder', 'remove', 'remove_all', 'repeat', 'repeatch', 'replace',
-        'requires', 'reverse', 'rfind', 'rgb', 'rmatch', 'rmdr', 'rnd', 'round',
-        'routine_id', 'scanf', 'serialize', 'series', 'set_rand',
-        'set_test_abort', 'set_test_logfile', 'set_test_module',
-        'set_test_pause', 'set_test_verbosity', 'set_timedate_formats',
-        'set_timezone', 'setd', 'setd_default', 'shorten', 'sha256',
-        'shift_bits', 'shuffle', 'sign', 'sin', 'smallest', 'sort',
-        'sort_columns', 'speak', 'splice', 'split', 'split_any', 'split_by',
-        'sprint', 'sprintf', 'sq_abs', 'sq_add', 'sq_and', 'sq_and_bits',
-        'sq_arccos', 'sq_arcsin', 'sq_arctan', 'sq_atom', 'sq_ceil', 'sq_cmp',
-        'sq_cos', 'sq_div', 'sq_even', 'sq_eq', 'sq_floor', 'sq_floor_div',
-        'sq_ge', 'sq_gt', 'sq_int', 'sq_le', 'sq_log', 'sq_log10', 'sq_log2',
-        'sq_lt', 'sq_max', 'sq_min', 'sq_mod', 'sq_mul', 'sq_ne', 'sq_not',
-        'sq_not_bits', 'sq_odd', 'sq_or', 'sq_or_bits', 'sq_power', 'sq_rand',
-        'sq_remainder', 'sq_rmdr', 'sq_rnd', 'sq_round', 'sq_seq', 'sq_sign',
-        'sq_sin', 'sq_sqrt', 'sq_str', 'sq_sub', 'sq_tan', 'sq_trunc',
-        'sq_uminus', 'sq_xor', 'sq_xor_bits', 'sqrt', 'square_free',
-        'stack_empty', 'stack_size', 'substitute', 'substitute_all', 'sum',
-        'tail', 'tan', 'test_equal', 'test_fail', 'test_false',
-        'test_not_equal', 'test_pass', 'test_summary', 'test_true',
-        'text_color', 'throw', 'time', 'timedate_diff', 'timedelta',
-        'to_integer', 'to_number', 'to_rgb', 'to_string', 'traverse_dict',
-        'traverse_dict_partial_key', 'trim', 'trim_head', 'trim_tail', 'trunc',
-        'tagset', 'tagstart', 'typeof', 'unique', 'unix_dict', 'upper',
-        'utf8_to_utf32', 'utf32_to_utf8', 'version', 'vlookup', 'vslice',
-        'wglGetProcAddress', 'wildcard_file', 'wildcard_match', 'with_rho',
-        'with_theta', 'xml_new_doc', 'xml_new_element', 'xml_set_attribute',
-        'xml_sprint', 'xor_bits', 'xor_bitsu',
-        'accept', 'allocate', 'allocate_string', 'allow_break', 'ARM',
-        'atom_to_float80', 'c_func', 'c_proc', 'call_back', 'chdir',
-        'check_break', 'clearDib', 'close', 'closesocket', 'console',
-        'copy_file', 'create', 'create_directory', 'create_thread',
-        'curl_easy_cleanup', 'curl_easy_get_file', 'curl_easy_init',
-        'curl_easy_perform', 'curl_easy_perform_ex', 'curl_easy_setopt',
-        'curl_easy_strerror', 'curl_global_cleanup', 'curl_global_init',
-        'curl_slist_append', 'curl_slist_free_all', 'current_dir', 'cursor',
-        'define_c_func', 'define_c_proc', 'delete', 'delete_cs', 'delete_file',
-        'dir', 'DLL', 'drawDib', 'drawShadedPolygonToDib', 'ELF32', 'ELF64',
-        'enter_cs', 'eval', 'exit_thread', 'free', 'file_exists', 'final',
-        'float80_to_atom', 'format', 'get_bytes', 'get_file_date',
-        'get_file_size', 'get_file_type', 'get_interpreter', 'get_key',
-        'get_socket_error', 'get_text', 'get_thread_exitcode', 'get_thread_id',
-        'getc', 'getenv', 'gets', 'getsockaddr', 'glBegin', 'glCallList',
-        'glFrustum', 'glGenLists', 'glGetString', 'glLight', 'glMaterial',
-        'glNewList', 'glNormal', 'glPopMatrix', 'glPushMatrix', 'glRotate',
-        'glEnd', 'glEndList', 'glTexImage2D', 'goto', 'GUI', 'icons', 'ilASM',
-        'include_files', 'include_paths', 'init_cs', 'ip_to_string',
-        'IupConfig', 'IupConfigDialogClosed', 'IupConfigDialogShow',
-        'IupConfigGetVariableInt', 'IupConfigLoad', 'IupConfigSave',
-        'IupConfigSetVariableInt', 'IupExitLoop', 'IupFileDlg', 'IupFileList',
-        'IupGLSwapBuffers', 'IupHelp', 'IupLoopStep', 'IupMainLoop',
-        'IupNormalizer', 'IupPlot', 'IupPlotAdd', 'IupPlotBegin', 'IupPlotEnd',
-        'IupPlotInsert', 'IupSaveImage', 'IupTreeGetUserId', 'IupUser',
-        'IupVersion', 'IupVersionDate', 'IupVersionNumber', 'IupVersionShow',
-        'killDib', 'leave_cs', 'listen', 'manifest', 'mem_copy', 'mem_set',
-        'mpfr_gamma', 'mpfr_printf', 'mpfr_sprintf', 'mpz_export', 'mpz_import',
-        'namespace', 'new', 'newDib', 'open', 'open_dll', 'PE32', 'PE64',
-        'peek', 'peek_string', 'peek1s', 'peek1u', 'peek2s', 'peek2u', 'peek4s',
-        'peek4u', 'peek8s', 'peek8u', 'peekNS', 'peekns', 'peeknu', 'poke',
-        'poke2', 'poke4', 'poke8', 'pokeN', 'poke_string', 'poke_wstring',
-        'position', 'progress', 'prompt_number', 'prompt_string', 'read_file',
-        'read_lines', 'recv', 'resume_thread', 'seek', 'select', 'send',
-        'setHandler', 'shutdown', 'sleep', 'SO', 'sockaddr_in', 'socket',
-        'split_path', 'suspend_thread', 'system', 'system_exec', 'system_open',
-        'system_wait', 'task_clock_start', 'task_clock_stop', 'task_create',
-        'task_delay', 'task_list', 'task_schedule', 'task_self', 'task_status',
-        'task_suspend', 'task_yield', 'thread_safe_string', 'try_cs',
-        'utf8_to_utf16', 'utf16_to_utf8', 'utf16_to_utf32', 'utf32_to_utf16',
-        'video_config', 'WSACleanup', 'wait_thread', 'walk_dir', 'where',
-        'write_lines', 'wait_key'
-    )
-    constants = (
-        'ANY_QUEUE', 'ASCENDING', 'BLACK', 'BLOCK_CURSOR', 'BLUE',
-        'BRIGHT_CYAN', 'BRIGHT_BLUE', 'BRIGHT_GREEN', 'BRIGHT_MAGENTA',
-        'BRIGHT_RED', 'BRIGHT_WHITE', 'BROWN', 'C_DWORD', 'C_INT', 'C_POINTER',
-        'C_USHORT', 'C_WORD', 'CD_AMBER', 'CD_BLACK', 'CD_BLUE', 'CD_BOLD',
-        'CD_BOLD_ITALIC', 'CD_BOX', 'CD_CENTER', 'CD_CIRCLE', 'CD_CLOSED_LINES',
-        'CD_CONTINUOUS', 'CD_CUSTOM', 'CD_CYAN', 'CD_DARK_BLUE', 'CD_DARK_CYAN',
-        'CD_DARK_GRAY', 'CD_DARK_GREY', 'CD_DARK_GREEN', 'CD_DARK_MAGENTA',
-        'CD_DARK_RED', 'CD_DARK_YELLOW', 'CD_DASH_DOT', 'CD_DASH_DOT_DOT',
-        'CD_DASHED', 'CD_DBUFFER', 'CD_DEG2RAD', 'CD_DIAMOND', 'CD_DOTTED',
-        'CD_EAST', 'CD_EVENODD', 'CD_FILL', 'CD_GL', 'CD_GRAY', 'CD_GREY',
-        'CD_GREEN', 'CD_HATCH', 'CD_HOLLOW', 'CD_HOLLOW_BOX',
-        'CD_HOLLOW_CIRCLE', 'CD_HOLLOW_DIAMOND', 'CD_INDIGO', 'CD_ITALIC',
-        'CD_IUP', 'CD_IUPDBUFFER', 'CD_LIGHT_BLUE', 'CD_LIGHT_GRAY',
-        'CD_LIGHT_GREY', 'CD_LIGHT_GREEN', 'CD_LIGHT_PARCHMENT', 'CD_MAGENTA',
-        'CD_NAVY', 'CD_NORTH', 'CD_NORTH_EAST', 'CD_NORTH_WEST', 'CD_OLIVE',
-        'CD_OPEN_LINES', 'CD_ORANGE', 'CD_PARCHMENT', 'CD_PATTERN',
-        'CD_PRINTER', 'CD_PURPLE', 'CD_PLAIN', 'CD_PLUS', 'CD_QUERY',
-        'CD_RAD2DEG', 'CD_RED', 'CD_SILVER', 'CD_SOLID', 'CD_SOUTH_EAST',
-        'CD_SOUTH_WEST', 'CD_STAR', 'CD_STIPPLE', 'CD_STRIKEOUT',
-        'CD_UNDERLINE', 'CD_WEST', 'CD_WHITE', 'CD_WINDING', 'CD_VIOLET',
-        'CD_X', 'CD_YELLOW', 'CURLE_OK', 'CURLOPT_MAIL_FROM',
-        'CURLOPT_MAIL_RCPT', 'CURLOPT_PASSWORD', 'CURLOPT_READDATA',
-        'CURLOPT_READFUNCTION', 'CURLOPT_SSL_VERIFYPEER',
-        'CURLOPT_SSL_VERIFYHOST', 'CURLOPT_UPLOAD', 'CURLOPT_URL',
-        'CURLOPT_USE_SSL', 'CURLOPT_USERNAME', 'CURLOPT_VERBOSE',
-        'CURLOPT_WRITEFUNCTION', 'CURLUSESSL_ALL', 'CYAN', 'D_NAME',
-        'D_ATTRIBUTES', 'D_SIZE', 'D_YEAR', 'D_MONTH', 'D_DAY', 'D_HOUR',
-        'D_MINUTE', 'D_SECOND', 'D_CREATION', 'D_LASTACCESS', 'D_MODIFICATION',
-        'DT_YEAR', 'DT_MONTH', 'DT_DAY', 'DT_HOUR', 'DT_MINUTE', 'DT_SECOND',
-        'DT_DOW', 'DT_MSEC', 'DT_DOY', 'DT_GMT', 'EULER', 'E_CODE', 'E_ADDR',
-        'E_LINE', 'E_RTN', 'E_NAME', 'E_FILE', 'E_PATH', 'E_USER', 'false',
-        'False', 'FALSE', 'FIFO_QUEUE', 'FILETYPE_DIRECTORY', 'FILETYPE_FILE',
-        'GET_EOF', 'GET_FAIL', 'GET_IGNORE', 'GET_SUCCESS',
-        'GL_AMBIENT_AND_DIFFUSE', 'GL_ARRAY_BUFFER', 'GL_CLAMP',
-        'GL_CLAMP_TO_BORDER', 'GL_CLAMP_TO_EDGE', 'GL_COLOR_BUFFER_BIT',
-        'GL_COMPILE', 'GL_COMPILE_STATUS', 'GL_CULL_FACE',
-        'GL_DEPTH_BUFFER_BIT', 'GL_DEPTH_TEST', 'GL_EXTENSIONS', 'GL_FLAT',
-        'GL_FLOAT', 'GL_FRAGMENT_SHADER', 'GL_FRONT', 'GL_LIGHT0',
-        'GL_LIGHTING', 'GL_LINEAR', 'GL_LINK_STATUS', 'GL_MODELVIEW',
-        'GL_NEAREST', 'GL_NO_ERROR', 'GL_NORMALIZE', 'GL_POSITION',
-        'GL_PROJECTION', 'GL_QUAD_STRIP', 'GL_QUADS', 'GL_RENDERER',
-        'GL_REPEAT', 'GL_RGB', 'GL_RGBA', 'GL_SMOOTH', 'GL_STATIC_DRAW',
-        'GL_TEXTURE_2D', 'GL_TEXTURE_MAG_FILTER', 'GL_TEXTURE_MIN_FILTER',
-        'GL_TEXTURE_WRAP_S', 'GL_TEXTURE_WRAP_T', 'GL_TRIANGLES',
-        'GL_UNSIGNED_BYTE', 'GL_VENDOR', 'GL_VERSION', 'GL_VERTEX_SHADER',
-        'GRAY', 'GREEN', 'GT_LF_STRIPPED', 'GT_WHOLE_FILE', 'INVLN10',
-        'IUP_CLOSE', 'IUP_CONTINUE', 'IUP_DEFAULT', 'IUP_BLACK', 'IUP_BLUE',
-        'IUP_BUTTON1', 'IUP_BUTTON3', 'IUP_CENTER', 'IUP_CYAN', 'IUP_DARK_BLUE',
-        'IUP_DARK_CYAN', 'IUP_DARK_GRAY', 'IUP_DARK_GREY', 'IUP_DARK_GREEN',
-        'IUP_DARK_MAGENTA', 'IUP_DARK_RED', 'IUP_GRAY', 'IUP_GREY', 'IUP_GREEN',
-        'IUP_IGNORE', 'IUP_INDIGO', 'IUP_MAGENTA', 'IUP_MASK_INT',
-        'IUP_MASK_UINT', 'IUP_MOUSEPOS', 'IUP_NAVY', 'IUP_OLIVE', 'IUP_RECTEXT',
-        'IUP_RED', 'IUP_LIGHT_BLUE', 'IUP_LIGHT_GRAY', 'IUP_LIGHT_GREY',
-        'IUP_LIGHT_GREEN', 'IUP_ORANGE', 'IUP_PARCHMENT', 'IUP_PURPLE',
-        'IUP_SILVER', 'IUP_TEAL', 'IUP_VIOLET', 'IUP_WHITE', 'IUP_YELLOW',
-        'K_BS', 'K_cA', 'K_cC', 'K_cD', 'K_cF5', 'K_cK', 'K_cM', 'K_cN', 'K_cO',
-        'K_cP', 'K_cR', 'K_cS', 'K_cT', 'K_cW', 'K_CR', 'K_DEL', 'K_DOWN',
-        'K_END', 'K_ESC', 'K_F1', 'K_F2', 'K_F3', 'K_F4', 'K_F5', 'K_F6',
-        'K_F7', 'K_F8', 'K_F9', 'K_F10', 'K_F11', 'K_F12', 'K_HOME', 'K_INS',
-        'K_LEFT', 'K_MIDDLE', 'K_PGDN', 'K_PGUP', 'K_RIGHT', 'K_SP', 'K_TAB',
-        'K_UP', 'K_h', 'K_i', 'K_j', 'K_p', 'K_r', 'K_s', 'JS', 'LIFO_QUEUE',
-        'LINUX', 'MAX_HEAP', 'MAGENTA', 'MIN_HEAP', 'Nan', 'NO_CURSOR', 'null',
-        'NULL', 'PI', 'pp_Ascii', 'pp_Brkt', 'pp_Date', 'pp_File', 'pp_FltFmt',
-        'pp_Indent', 'pp_IntCh', 'pp_IntFmt', 'pp_Maxlen', 'pp_Nest',
-        'pp_Pause', 'pp_Q22', 'pp_StrFmt', 'RED', 'SEEK_OK', 'SLASH',
-        'TEST_ABORT', 'TEST_CRASH', 'TEST_PAUSE', 'TEST_PAUSE_FAIL',
-        'TEST_QUIET', 'TEST_SHOW_ALL', 'TEST_SHOW_FAILED', 'TEST_SUMMARY',
-        'true', 'True', 'TRUE', 'VC_SCRNLINES', 'WHITE', 'WINDOWS', 'YELLOW'
-    )
-
-    tokens = {
-        'root': [
-            (r"\s+", Whitespace),
-            (r'/\*|--/\*|#\[', Comment.Multiline, 'comment'),
-            (r'(?://|--|#!).*$', Comment.Single),
-#Alt:
-#           (r'//.*$|--.*$|#!.*$', Comment.Single),
-            (r'"([^"\\]|\\.)*"', String.Other),
-            (r'\'[^\']*\'', String.Other),
-            (r'`[^`]*`', String.Other),
-
-            (words(types, prefix=r'\b', suffix=r'\b'), Name.Function),
-            (words(routines, prefix=r'\b', suffix=r'\b'), Name.Function),
-            (words(preproc, prefix=r'\b', suffix=r'\b'), Keyword.Declaration),
-            (words(keywords, prefix=r'\b', suffix=r'\b'), Keyword.Declaration),
-            (words(constants, prefix=r'\b', suffix=r'\b'), Name.Constant),
-            # Aside: Phix only supports/uses the ascii/non-unicode tilde
-            (r'!=|==|<<|>>|:=|[-~+/*%=<>&^|\.(){},?:\[\]$\\;#]', Operator),
-            (r'[\w-]+', Text)
-        ],
-        'comment': [
-            (r'[^*/#]+', Comment.Multiline),
-            (r'/\*|#\[', Comment.Multiline, '#push'),
-            (r'\*/|#\]', Comment.Multiline, '#pop'),
-            (r'[*/#]', Comment.Multiline)
-        ]
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/php.py b/venv/lib/python3.11/site-packages/pygments/lexers/php.py
deleted file mode 100644
index a0a0021..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/php.py
+++ /dev/null
@@ -1,335 +0,0 @@
-"""
-    pygments.lexers.php
-    ~~~~~~~~~~~~~~~~~~~
-
-    Lexers for PHP and related languages.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, include, bygroups, default, \
-    using, this, words, do_insertions, line_re
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation, Other, Generic
-from pygments.util import get_bool_opt, get_list_opt, shebang_matches
-
-__all__ = ['ZephirLexer', 'PsyshConsoleLexer', 'PhpLexer']
-
-
-class ZephirLexer(RegexLexer):
-    """
-    For Zephir language source code.
-
-    Zephir is a compiled high level language aimed
-    to the creation of C-extensions for PHP.
-
-    .. versionadded:: 2.0
-    """
-
-    name = 'Zephir'
-    url = 'http://zephir-lang.com/'
-    aliases = ['zephir']
-    filenames = ['*.zep']
-
-    zephir_keywords = ['fetch', 'echo', 'isset', 'empty']
-    zephir_type = ['bit', 'bits', 'string']
-
-    flags = re.DOTALL | re.MULTILINE
-
-    tokens = {
-        'commentsandwhitespace': [
-            (r'\s+', Text),
-            (r'//.*?\n', Comment.Single),
-            (r'/\*.*?\*/', Comment.Multiline)
-        ],
-        'slashstartsregex': [
-            include('commentsandwhitespace'),
-            (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
-             r'([gim]+\b|\B)', String.Regex, '#pop'),
-            (r'/', Operator, '#pop'),
-            default('#pop')
-        ],
-        'badregex': [
-            (r'\n', Text, '#pop')
-        ],
-        'root': [
-            (r'^(?=\s|/)', Text, 'slashstartsregex'),
-            include('commentsandwhitespace'),
-            (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|'
-             r'(<<|>>>?|==?|!=?|->|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
-            (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
-            (r'[})\].]', Punctuation),
-            (r'(for|in|while|do|break|return|continue|switch|case|default|if|else|loop|'
-             r'require|inline|throw|try|catch|finally|new|delete|typeof|instanceof|void|'
-             r'namespace|use|extends|this|fetch|isset|unset|echo|fetch|likely|unlikely|'
-             r'empty)\b', Keyword, 'slashstartsregex'),
-            (r'(var|let|with|function)\b', Keyword.Declaration, 'slashstartsregex'),
-            (r'(abstract|boolean|bool|char|class|const|double|enum|export|extends|final|'
-             r'native|goto|implements|import|int|string|interface|long|ulong|char|uchar|'
-             r'float|unsigned|private|protected|public|short|static|self|throws|reverse|'
-             r'transient|volatile|readonly)\b', Keyword.Reserved),
-            (r'(true|false|null|undefined)\b', Keyword.Constant),
-            (r'(Array|Boolean|Date|_REQUEST|_COOKIE|_SESSION|'
-             r'_GET|_POST|_SERVER|this|stdClass|range|count|iterator|'
-             r'window)\b', Name.Builtin),
-            (r'[$a-zA-Z_][\w\\]*', Name.Other),
-            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
-            (r'0x[0-9a-fA-F]+', Number.Hex),
-            (r'[0-9]+', Number.Integer),
-            (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
-            (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
-        ]
-    }
-
-
-class PsyshConsoleLexer(Lexer):
-    """
-    For PsySH console output, such as:
-
-    .. sourcecode:: psysh
-
-        >>> $greeting = function($name): string {
-        ...     return "Hello, {$name}";
-        ... };
-        => Closure($name): string {#2371 …3}
-        >>> $greeting('World')
-        => "Hello, World"
-
-    .. versionadded:: 2.7
-    """
-    name = 'PsySH console session for PHP'
-    url = 'https://psysh.org/'
-    aliases = ['psysh']
-
-    def __init__(self, **options):
-        options['startinline'] = True
-        Lexer.__init__(self, **options)
-
-    def get_tokens_unprocessed(self, text):
-        phplexer = PhpLexer(**self.options)
-        curcode = ''
-        insertions = []
-        for match in line_re.finditer(text):
-            line = match.group()
-            if line.startswith('>>> ') or line.startswith('... '):
-                insertions.append((len(curcode),
-                                   [(0, Generic.Prompt, line[:4])]))
-                curcode += line[4:]
-            elif line.rstrip() == '...':
-                insertions.append((len(curcode),
-                                   [(0, Generic.Prompt, '...')]))
-                curcode += line[3:]
-            else:
-                if curcode:
-                    yield from do_insertions(
-                        insertions, phplexer.get_tokens_unprocessed(curcode))
-                    curcode = ''
-                    insertions = []
-                yield match.start(), Generic.Output, line
-        if curcode:
-            yield from do_insertions(insertions,
-                                     phplexer.get_tokens_unprocessed(curcode))
-
-
-class PhpLexer(RegexLexer):
-    """
-    For PHP source code.
-    For PHP embedded in HTML, use the `HtmlPhpLexer`.
-
-    Additional options accepted:
-
-    `startinline`
-        If given and ``True`` the lexer starts highlighting with
-        php code (i.e.: no starting ``>> from pygments.lexers._php_builtins import MODULES
-            >>> MODULES.keys()
-            ['PHP Options/Info', 'Zip', 'dba', ...]
-
-        In fact the names of those modules match the module names from
-        the php documentation.
-    """
-
-    name = 'PHP'
-    url = 'https://www.php.net/'
-    aliases = ['php', 'php3', 'php4', 'php5']
-    filenames = ['*.php', '*.php[345]', '*.inc']
-    mimetypes = ['text/x-php']
-
-    # Note that a backslash is included, PHP uses a backslash as a namespace
-    # separator.
-    _ident_inner = r'(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*'
-    # But not inside strings.
-    _ident_nons = r'(?:[_a-z]|[^\x00-\x7f])(?:\w|[^\x00-\x7f])*'
-
-    flags = re.IGNORECASE | re.DOTALL | re.MULTILINE
-    tokens = {
-        'root': [
-            (r'<\?(php)?', Comment.Preproc, 'php'),
-            (r'[^<]+', Other),
-            (r'<', Other)
-        ],
-        'php': [
-            (r'\?>', Comment.Preproc, '#pop'),
-            (r'(<<<)([\'"]?)(' + _ident_nons + r')(\2\n.*?\n\s*)(\3)(;?)(\n)',
-             bygroups(String, String, String.Delimiter, String, String.Delimiter,
-                      Punctuation, Text)),
-            (r'\s+', Text),
-            (r'#\[', Punctuation, 'attribute'),
-            (r'#.*?\n', Comment.Single),
-            (r'//.*?\n', Comment.Single),
-            # put the empty comment here, it is otherwise seen as
-            # the start of a docstring
-            (r'/\*\*/', Comment.Multiline),
-            (r'/\*\*.*?\*/', String.Doc),
-            (r'/\*.*?\*/', Comment.Multiline),
-            (r'(->|::)(\s*)(' + _ident_nons + ')',
-             bygroups(Operator, Text, Name.Attribute)),
-            (r'[~!%^&*+=|:.<>/@-]+', Operator),
-            (r'\?', Operator),  # don't add to the charclass above!
-            (r'[\[\]{}();,]+', Punctuation),
-            (r'(new)(\s+)(class)\b', bygroups(Keyword, Text, Keyword)),
-            (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'),
-            (r'(function)(\s*)(?=\()', bygroups(Keyword, Text)),
-            (r'(function)(\s+)(&?)(\s*)',
-             bygroups(Keyword, Text, Operator, Text), 'functionname'),
-            (r'(const)(\s+)(' + _ident_inner + ')',
-             bygroups(Keyword, Text, Name.Constant)),
-            (r'(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|'
-             r'eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|'
-             r'FALSE|print|for|require|continue|foreach|require_once|'
-             r'declare|return|default|static|do|switch|die|stdClass|'
-             r'echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|'
-             r'virtual|endfor|include_once|while|endforeach|global|'
-             r'endif|list|endswitch|new|endwhile|not|'
-             r'array|E_ALL|NULL|final|php_user_filter|interface|'
-             r'implements|public|private|protected|abstract|clone|try|'
-             r'catch|throw|this|use|namespace|trait|yield|'
-             r'finally|match)\b', Keyword),
-            (r'(true|false|null)\b', Keyword.Constant),
-            include('magicconstants'),
-            (r'\$\{', Name.Variable, 'variablevariable'),
-            (r'\$+' + _ident_inner, Name.Variable),
-            (_ident_inner, Name.Other),
-            (r'(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?', Number.Float),
-            (r'\d+e[+-]?[0-9]+', Number.Float),
-            (r'0[0-7]+', Number.Oct),
-            (r'0x[a-f0-9]+', Number.Hex),
-            (r'\d+', Number.Integer),
-            (r'0b[01]+', Number.Bin),
-            (r"'([^'\\]*(?:\\.[^'\\]*)*)'", String.Single),
-            (r'`([^`\\]*(?:\\.[^`\\]*)*)`', String.Backtick),
-            (r'"', String.Double, 'string'),
-        ],
-        'variablevariable': [
-            (r'\}', Name.Variable, '#pop'),
-            include('php')
-        ],
-        'magicfuncs': [
-            # source: http://php.net/manual/en/language.oop5.magic.php
-            (words((
-                '__construct', '__destruct', '__call', '__callStatic', '__get', '__set',
-                '__isset', '__unset', '__sleep', '__wakeup', '__toString', '__invoke',
-                '__set_state', '__clone', '__debugInfo',), suffix=r'\b'),
-             Name.Function.Magic),
-        ],
-        'magicconstants': [
-            # source: http://php.net/manual/en/language.constants.predefined.php
-            (words((
-                '__LINE__', '__FILE__', '__DIR__', '__FUNCTION__', '__CLASS__',
-                '__TRAIT__', '__METHOD__', '__NAMESPACE__',),
-                suffix=r'\b'),
-             Name.Constant),
-        ],
-        'classname': [
-            (_ident_inner, Name.Class, '#pop')
-        ],
-        'functionname': [
-            include('magicfuncs'),
-            (_ident_inner, Name.Function, '#pop'),
-            default('#pop')
-        ],
-        'string': [
-            (r'"', String.Double, '#pop'),
-            (r'[^{$"\\]+', String.Double),
-            (r'\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})', String.Escape),
-            (r'\$' + _ident_nons + r'(\[\S+?\]|->' + _ident_nons + ')?',
-             String.Interpol),
-            (r'(\{\$\{)(.*?)(\}\})',
-             bygroups(String.Interpol, using(this, _startinline=True),
-                      String.Interpol)),
-            (r'(\{)(\$.*?)(\})',
-             bygroups(String.Interpol, using(this, _startinline=True),
-                      String.Interpol)),
-            (r'(\$\{)(\S+)(\})',
-             bygroups(String.Interpol, Name.Variable, String.Interpol)),
-            (r'[${\\]', String.Double)
-        ],
-        'attribute': [
-            (r'\]', Punctuation, '#pop'),
-            (r'\(', Punctuation, 'attributeparams'),
-            (_ident_inner, Name.Decorator),
-            include('php')
-        ],
-        'attributeparams': [
-            (r'\)', Punctuation, '#pop'),
-            include('php')
-        ],
-    }
-
-    def __init__(self, **options):
-        self.funcnamehighlighting = get_bool_opt(
-            options, 'funcnamehighlighting', True)
-        self.disabledmodules = get_list_opt(
-            options, 'disabledmodules', ['unknown'])
-        self.startinline = get_bool_opt(options, 'startinline', False)
-
-        # private option argument for the lexer itself
-        if '_startinline' in options:
-            self.startinline = options.pop('_startinline')
-
-        # collect activated functions in a set
-        self._functions = set()
-        if self.funcnamehighlighting:
-            from pygments.lexers._php_builtins import MODULES
-            for key, value in MODULES.items():
-                if key not in self.disabledmodules:
-                    self._functions.update(value)
-        RegexLexer.__init__(self, **options)
-
-    def get_tokens_unprocessed(self, text):
-        stack = ['root']
-        if self.startinline:
-            stack.append('php')
-        for index, token, value in \
-                RegexLexer.get_tokens_unprocessed(self, text, stack):
-            if token is Name.Other:
-                if value in self._functions:
-                    yield index, Name.Builtin, value
-                    continue
-            yield index, token, value
-
-    def analyse_text(text):
-        if shebang_matches(text, r'php'):
-            return True
-        rv = 0.0
-        if re.search(r'<\?(?!xml)', text):
-            rv += 0.3
-        return rv
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/pointless.py b/venv/lib/python3.11/site-packages/pygments/lexers/pointless.py
deleted file mode 100644
index eb73b2a..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/pointless.py
+++ /dev/null
@@ -1,71 +0,0 @@
-"""
-    pygments.lexers.pointless
-    ~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for Pointless.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Comment, Error, Keyword, Name, Number, Operator, \
-    Punctuation, String, Text
-
-__all__ = ['PointlessLexer']
-
-
-class PointlessLexer(RegexLexer):
-    """
-    For Pointless source code.
-
-    .. versionadded:: 2.7
-    """
-
-    name = 'Pointless'
-    url = 'https://ptls.dev'
-    aliases = ['pointless']
-    filenames = ['*.ptls']
-
-    ops = words([
-        "+", "-", "*", "/", "**", "%", "+=", "-=", "*=",
-        "/=", "**=", "%=", "|>", "=", "==", "!=", "<", ">",
-        "<=", ">=", "=>", "$", "++",
-    ])
-
-    keywords = words([
-        "if", "then", "else", "where", "with", "cond",
-        "case", "and", "or", "not", "in", "as", "for",
-        "requires", "throw", "try", "catch", "when",
-        "yield", "upval",
-    ], suffix=r'\b')
-
-    tokens = {
-        'root': [
-            (r'[ \n\r]+', Text),
-            (r'--.*$', Comment.Single),
-            (r'"""', String, 'multiString'),
-            (r'"', String, 'string'),
-            (r'[\[\](){}:;,.]', Punctuation),
-            (ops, Operator),
-            (keywords, Keyword),
-            (r'\d+|\d*\.\d+', Number),
-            (r'(true|false)\b', Name.Builtin),
-            (r'[A-Z][a-zA-Z0-9]*\b', String.Symbol),
-            (r'output\b', Name.Variable.Magic),
-            (r'(export|import)\b', Keyword.Namespace),
-            (r'[a-z][a-zA-Z0-9]*\b', Name.Variable)
-        ],
-        'multiString': [
-            (r'\\.', String.Escape),
-            (r'"""', String, '#pop'),
-            (r'"', String),
-            (r'[^\\"]+', String),
-        ],
-        'string': [
-            (r'\\.', String.Escape),
-            (r'"', String, '#pop'),
-            (r'\n', Error),
-            (r'[^\\"]+', String),
-        ],
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/pony.py b/venv/lib/python3.11/site-packages/pygments/lexers/pony.py
deleted file mode 100644
index 03adc5f..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/pony.py
+++ /dev/null
@@ -1,93 +0,0 @@
-"""
-    pygments.lexers.pony
-    ~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for Pony and related languages.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation
-
-__all__ = ['PonyLexer']
-
-
-class PonyLexer(RegexLexer):
-    """
-    For Pony source code.
-
-    .. versionadded:: 2.4
-    """
-
-    name = 'Pony'
-    aliases = ['pony']
-    filenames = ['*.pony']
-
-    _caps = r'(iso|trn|ref|val|box|tag)'
-
-    tokens = {
-        'root': [
-            (r'\n', Text),
-            (r'[^\S\n]+', Text),
-            (r'//.*\n', Comment.Single),
-            (r'/\*', Comment.Multiline, 'nested_comment'),
-            (r'"""(?:.|\n)*?"""', String.Doc),
-            (r'"', String, 'string'),
-            (r'\'.*\'', String.Char),
-            (r'=>|[]{}:().~;,|&!^?[]', Punctuation),
-            (words((
-                'addressof', 'and', 'as', 'consume', 'digestof', 'is', 'isnt',
-                'not', 'or'),
-                suffix=r'\b'),
-             Operator.Word),
-            (r'!=|==|<<|>>|[-+/*%=<>]', Operator),
-            (words((
-                'box', 'break', 'compile_error', 'compile_intrinsic',
-                'continue', 'do', 'else', 'elseif', 'embed', 'end', 'error',
-                'for', 'if', 'ifdef', 'in', 'iso', 'lambda', 'let', 'match',
-                'object', 'recover', 'ref', 'repeat', 'return', 'tag', 'then',
-                'this', 'trn', 'try', 'until', 'use', 'var', 'val', 'where',
-                'while', 'with', '#any', '#read', '#send', '#share'),
-                suffix=r'\b'),
-             Keyword),
-            (r'(actor|class|struct|primitive|interface|trait|type)((?:\s)+)',
-             bygroups(Keyword, Text), 'typename'),
-            (r'(new|fun|be)((?:\s)+)', bygroups(Keyword, Text), 'methodname'),
-            (words((
-                'I8', 'U8', 'I16', 'U16', 'I32', 'U32', 'I64', 'U64', 'I128',
-                'U128', 'ILong', 'ULong', 'ISize', 'USize', 'F32', 'F64',
-                'Bool', 'Pointer', 'None', 'Any', 'Array', 'String',
-                'Iterator'),
-                suffix=r'\b'),
-             Name.Builtin.Type),
-            (r'_?[A-Z]\w*', Name.Type),
-            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+', Number.Float),
-            (r'0x[0-9a-fA-F]+', Number.Hex),
-            (r'\d+', Number.Integer),
-            (r'(true|false)\b', Name.Builtin),
-            (r'_\d*', Name),
-            (r'_?[a-z][\w\']*', Name)
-        ],
-        'typename': [
-            (_caps + r'?((?:\s)*)(_?[A-Z]\w*)',
-             bygroups(Keyword, Text, Name.Class), '#pop')
-        ],
-        'methodname': [
-            (_caps + r'?((?:\s)*)(_?[a-z]\w*)',
-             bygroups(Keyword, Text, Name.Function), '#pop')
-        ],
-        'nested_comment': [
-            (r'[^*/]+', Comment.Multiline),
-            (r'/\*', Comment.Multiline, '#push'),
-            (r'\*/', Comment.Multiline, '#pop'),
-            (r'[*/]', Comment.Multiline)
-        ],
-        'string': [
-            (r'"', String, '#pop'),
-            (r'\\"', String),
-            (r'[^\\"]+', String)
-        ]
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/praat.py b/venv/lib/python3.11/site-packages/pygments/lexers/praat.py
deleted file mode 100644
index c8533a6..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/praat.py
+++ /dev/null
@@ -1,304 +0,0 @@
-"""
-    pygments.lexers.praat
-    ~~~~~~~~~~~~~~~~~~~~~
-
-    Lexer for Praat
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, bygroups, include
-from pygments.token import Name, Text, Comment, Keyword, String, Punctuation, \
-    Number, Operator, Whitespace
-
-__all__ = ['PraatLexer']
-
-
-class PraatLexer(RegexLexer):
-    """
-    For Praat scripts.
-
-    .. versionadded:: 2.1
-    """
-
-    name = 'Praat'
-    url = 'http://www.praat.org'
-    aliases = ['praat']
-    filenames = ['*.praat', '*.proc', '*.psc']
-
-    keywords = (
-        'if', 'then', 'else', 'elsif', 'elif', 'endif', 'fi', 'for', 'from', 'to',
-        'endfor', 'endproc', 'while', 'endwhile', 'repeat', 'until', 'select', 'plus',
-        'minus', 'demo', 'assert', 'stopwatch', 'nocheck', 'nowarn', 'noprogress',
-        'editor', 'endeditor', 'clearinfo',
-    )
-
-    functions_string = (
-        'backslashTrigraphsToUnicode', 'chooseDirectory', 'chooseReadFile',
-        'chooseWriteFile', 'date', 'demoKey', 'do', 'environment', 'extractLine',
-        'extractWord', 'fixed', 'info', 'left', 'mid', 'percent', 'readFile', 'replace',
-        'replace_regex', 'right', 'selected', 'string', 'unicodeToBackslashTrigraphs',
-    )
-
-    functions_numeric = (
-        'abs', 'appendFile', 'appendFileLine', 'appendInfo', 'appendInfoLine', 'arccos',
-        'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'barkToHertz',
-        'beginPause', 'beginSendPraat', 'besselI', 'besselK', 'beta', 'beta2',
-        'binomialP', 'binomialQ', 'boolean', 'ceiling', 'chiSquareP', 'chiSquareQ',
-        'choice', 'comment', 'cos', 'cosh', 'createDirectory', 'deleteFile',
-        'demoClicked', 'demoClickedIn', 'demoCommandKeyPressed',
-        'demoExtraControlKeyPressed', 'demoInput', 'demoKeyPressed',
-        'demoOptionKeyPressed', 'demoShiftKeyPressed', 'demoShow', 'demoWaitForInput',
-        'demoWindowTitle', 'demoX', 'demoY', 'differenceLimensToPhon', 'do', 'editor',
-        'endPause', 'endSendPraat', 'endsWith', 'erb', 'erbToHertz', 'erf', 'erfc',
-        'exitScript', 'exp', 'extractNumber', 'fileReadable', 'fisherP', 'fisherQ',
-        'floor', 'gaussP', 'gaussQ', 'hertzToBark', 'hertzToErb', 'hertzToMel',
-        'hertzToSemitones', 'imax', 'imin', 'incompleteBeta', 'incompleteGammaP', 'index',
-        'index_regex', 'integer', 'invBinomialP', 'invBinomialQ', 'invChiSquareQ', 'invFisherQ',
-        'invGaussQ', 'invSigmoid', 'invStudentQ', 'length', 'ln', 'lnBeta', 'lnGamma',
-        'log10', 'log2', 'max', 'melToHertz', 'min', 'minusObject', 'natural', 'number',
-        'numberOfColumns', 'numberOfRows', 'numberOfSelected', 'objectsAreIdentical',
-        'option', 'optionMenu', 'pauseScript', 'phonToDifferenceLimens', 'plusObject',
-        'positive', 'randomBinomial', 'randomGauss', 'randomInteger', 'randomPoisson',
-        'randomUniform', 'real', 'readFile', 'removeObject', 'rindex', 'rindex_regex',
-        'round', 'runScript', 'runSystem', 'runSystem_nocheck', 'selectObject',
-        'selected', 'semitonesToHertz', 'sentence', 'sentencetext', 'sigmoid', 'sin', 'sinc',
-        'sincpi', 'sinh', 'soundPressureToPhon', 'sqrt', 'startsWith', 'studentP',
-        'studentQ', 'tan', 'tanh', 'text', 'variableExists', 'word', 'writeFile', 'writeFileLine',
-        'writeInfo', 'writeInfoLine',
-    )
-
-    functions_array = (
-        'linear', 'randomGauss', 'randomInteger', 'randomUniform', 'zero',
-    )
-
-    objects = (
-        'Activation', 'AffineTransform', 'AmplitudeTier', 'Art', 'Artword',
-        'Autosegment', 'BarkFilter', 'BarkSpectrogram', 'CCA', 'Categories',
-        'Cepstrogram', 'Cepstrum', 'Cepstrumc', 'ChebyshevSeries', 'ClassificationTable',
-        'Cochleagram', 'Collection', 'ComplexSpectrogram', 'Configuration', 'Confusion',
-        'ContingencyTable', 'Corpus', 'Correlation', 'Covariance',
-        'CrossCorrelationTable', 'CrossCorrelationTables', 'DTW', 'DataModeler',
-        'Diagonalizer', 'Discriminant', 'Dissimilarity', 'Distance', 'Distributions',
-        'DurationTier', 'EEG', 'ERP', 'ERPTier', 'EditCostsTable', 'EditDistanceTable',
-        'Eigen', 'Excitation', 'Excitations', 'ExperimentMFC', 'FFNet', 'FeatureWeights',
-        'FileInMemory', 'FilesInMemory', 'Formant', 'FormantFilter', 'FormantGrid',
-        'FormantModeler', 'FormantPoint', 'FormantTier', 'GaussianMixture', 'HMM',
-        'HMM_Observation', 'HMM_ObservationSequence', 'HMM_State', 'HMM_StateSequence',
-        'Harmonicity', 'ISpline', 'Index', 'Intensity', 'IntensityTier', 'IntervalTier',
-        'KNN', 'KlattGrid', 'KlattTable', 'LFCC', 'LPC', 'Label', 'LegendreSeries',
-        'LinearRegression', 'LogisticRegression', 'LongSound', 'Ltas', 'MFCC', 'MSpline',
-        'ManPages', 'Manipulation', 'Matrix', 'MelFilter', 'MelSpectrogram',
-        'MixingMatrix', 'Movie', 'Network', 'Object', 'OTGrammar', 'OTHistory', 'OTMulti',
-        'PCA', 'PairDistribution', 'ParamCurve', 'Pattern', 'Permutation', 'Photo',
-        'Pitch', 'PitchModeler', 'PitchTier', 'PointProcess', 'Polygon', 'Polynomial',
-        'PowerCepstrogram', 'PowerCepstrum', 'Procrustes', 'RealPoint', 'RealTier',
-        'ResultsMFC', 'Roots', 'SPINET', 'SSCP', 'SVD', 'Salience', 'ScalarProduct',
-        'Similarity', 'SimpleString', 'SortedSetOfString', 'Sound', 'Speaker',
-        'Spectrogram', 'Spectrum', 'SpectrumTier', 'SpeechSynthesizer', 'SpellingChecker',
-        'Strings', 'StringsIndex', 'Table', 'TableOfReal', 'TextGrid', 'TextInterval',
-        'TextPoint', 'TextTier', 'Tier', 'Transition', 'VocalTract', 'VocalTractTier',
-        'Weight', 'WordList',
-    )
-
-    variables_numeric = (
-        'macintosh', 'windows', 'unix', 'praatVersion', 'pi', 'e', 'undefined',
-    )
-
-    variables_string = (
-        'praatVersion', 'tab', 'shellDirectory', 'homeDirectory',
-        'preferencesDirectory', 'newline', 'temporaryDirectory',
-        'defaultDirectory',
-    )
-
-    object_attributes = (
-        'ncol', 'nrow', 'xmin', 'ymin', 'xmax', 'ymax', 'nx', 'ny', 'dx', 'dy',
-    )
-
-    tokens = {
-        'root': [
-            (r'(\s+)(#.*?$)',  bygroups(Whitespace, Comment.Single)),
-            (r'^#.*?$',        Comment.Single),
-            (r';[^\n]*',       Comment.Single),
-            (r'\s+',           Whitespace),
-
-            (r'\bprocedure\b', Keyword,       'procedure_definition'),
-            (r'\bcall\b',      Keyword,       'procedure_call'),
-            (r'@',             Name.Function, 'procedure_call'),
-
-            include('function_call'),
-
-            (words(keywords, suffix=r'\b'), Keyword),
-
-            (r'(\bform\b)(\s+)([^\n]+)',
-             bygroups(Keyword, Whitespace, String), 'old_form'),
-
-            (r'(print(?:line|tab)?|echo|exit|asserterror|pause|send(?:praat|socket)|'
-             r'include|execute|system(?:_nocheck)?)(\s+)',
-             bygroups(Keyword, Whitespace), 'string_unquoted'),
-
-            (r'(goto|label)(\s+)(\w+)', bygroups(Keyword, Whitespace, Name.Label)),
-
-            include('variable_name'),
-            include('number'),
-
-            (r'"', String, 'string'),
-
-            (words((objects), suffix=r'(?=\s+\S+\n)'), Name.Class, 'string_unquoted'),
-
-            (r'\b[A-Z]', Keyword, 'command'),
-            (r'(\.{3}|[)(,])', Punctuation),
-        ],
-        'command': [
-            (r'( ?[\w()-]+ ?)', Keyword),
-
-            include('string_interpolated'),
-
-            (r'\.{3}', Keyword, ('#pop', 'old_arguments')),
-            (r':', Keyword, ('#pop', 'comma_list')),
-            (r'\s', Whitespace, '#pop'),
-        ],
-        'procedure_call': [
-            (r'\s+', Whitespace),
-            (r'([\w.]+)(?:(:)|(?:(\s*)(\()))',
-             bygroups(Name.Function, Punctuation,
-                      Text.Whitespace, Punctuation), '#pop'),
-            (r'([\w.]+)', Name.Function, ('#pop', 'old_arguments')),
-        ],
-        'procedure_definition': [
-            (r'\s', Whitespace),
-            (r'([\w.]+)(\s*?[(:])',
-             bygroups(Name.Function, Whitespace), '#pop'),
-            (r'([\w.]+)([^\n]*)',
-             bygroups(Name.Function, Text), '#pop'),
-        ],
-        'function_call': [
-            (words(functions_string, suffix=r'\$(?=\s*[:(])'), Name.Function, 'function'),
-            (words(functions_array, suffix=r'#(?=\s*[:(])'),   Name.Function, 'function'),
-            (words(functions_numeric, suffix=r'(?=\s*[:(])'),  Name.Function, 'function'),
-        ],
-        'function': [
-            (r'\s+',   Whitespace),
-            (r':',     Punctuation, ('#pop', 'comma_list')),
-            (r'\s*\(', Punctuation, ('#pop', 'comma_list')),
-        ],
-        'comma_list': [
-            (r'(\s*\n\s*)(\.{3})', bygroups(Whitespace, Punctuation)),
-
-            (r'(\s*)(?:([)\]])|(\n))', bygroups(
-                Whitespace, Punctuation, Whitespace), '#pop'),
-
-            (r'\s+', Whitespace),
-            (r'"',   String, 'string'),
-            (r'\b(if|then|else|fi|endif)\b', Keyword),
-
-            include('function_call'),
-            include('variable_name'),
-            include('operator'),
-            include('number'),
-
-            (r'[()]', Text),
-            (r',', Punctuation),
-        ],
-        'old_arguments': [
-            (r'\n', Whitespace, '#pop'),
-
-            include('variable_name'),
-            include('operator'),
-            include('number'),
-
-            (r'"', String, 'string'),
-            (r'[^\n]', Text),
-        ],
-        'number': [
-            (r'\n', Whitespace, '#pop'),
-            (r'\b\d+(\.\d*)?([eE][-+]?\d+)?%?', Number),
-        ],
-        'object_reference': [
-            include('string_interpolated'),
-            (r'([a-z][a-zA-Z0-9_]*|\d+)', Name.Builtin),
-
-            (words(object_attributes, prefix=r'\.'), Name.Builtin, '#pop'),
-
-            (r'\$', Name.Builtin),
-            (r'\[', Text, '#pop'),
-        ],
-        'variable_name': [
-            include('operator'),
-            include('number'),
-
-            (words(variables_string,  suffix=r'\$'), Name.Variable.Global),
-            (words(variables_numeric,
-             suffix=r'(?=[^a-zA-Z0-9_."\'$#\[:(]|\s|^|$)'),
-             Name.Variable.Global),
-
-            (words(objects, prefix=r'\b', suffix=r"(_)"),
-             bygroups(Name.Builtin, Name.Builtin),
-             'object_reference'),
-
-            (r'\.?_?[a-z][\w.]*(\$|#)?', Text),
-            (r'[\[\]]', Punctuation, 'comma_list'),
-
-            include('string_interpolated'),
-        ],
-        'operator': [
-            (r'([+\/*<>=!-]=?|[&*|][&*|]?|\^|<>)',       Operator),
-            (r'(?', Punctuation),
-            (r'"(?:\\x[0-9a-fA-F]+\\|\\u[0-9a-fA-F]{4}|\\U[0-9a-fA-F]{8}|'
-             r'\\[0-7]+\\|\\["\\abcefnrstv]|[^\\"])*"', String.Double),
-            (r"'(?:''|[^'])*'", String.Atom),  # quoted atom
-            # Needs to not be followed by an atom.
-            # (r'=(?=\s|[a-zA-Z\[])', Operator),
-            (r'is\b', Operator),
-            (r'(<|>|=<|>=|==|=:=|=|/|//|\*|\+|-)(?=\s|[a-zA-Z0-9\[])',
-             Operator),
-            (r'(mod|div|not)\b', Operator),
-            (r'_', Keyword),  # The don't-care variable
-            (r'([a-z]+)(:)', bygroups(Name.Namespace, Punctuation)),
-            (r'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
-             r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
-             r'(\s*)(:-|-->)',
-             bygroups(Name.Function, Text, Operator)),  # function defn
-            (r'([a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
-             r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*)'
-             r'(\s*)(\()',
-             bygroups(Name.Function, Text, Punctuation)),
-            (r'[a-z\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]'
-             r'[\w$\u00c0-\u1fff\u3040-\ud7ff\ue000-\uffef]*',
-             String.Atom),  # atom, characters
-            # This one includes !
-            (r'[#&*+\-./:<=>?@\\^~\u00a1-\u00bf\u2010-\u303f]+',
-             String.Atom),  # atom, graphics
-            (r'[A-Z_]\w*', Name.Variable),
-            (r'\s+|[\u2000-\u200f\ufff0-\ufffe\uffef]', Text),
-        ],
-        'nested-comment': [
-            (r'\*/', Comment.Multiline, '#pop'),
-            (r'/\*', Comment.Multiline, '#push'),
-            (r'[^*/]+', Comment.Multiline),
-            (r'[*/]', Comment.Multiline),
-        ],
-    }
-
-    def analyse_text(text):
-        """Competes with IDL and Visual Prolog on *.pro"""
-        if ':-' in text:
-            # Visual Prolog also uses :-
-            return 0.5
-        else:
-            return 0
-
-
-class LogtalkLexer(RegexLexer):
-    """
-    For Logtalk source code.
-
-    .. versionadded:: 0.10
-    """
-
-    name = 'Logtalk'
-    url = 'http://logtalk.org/'
-    aliases = ['logtalk']
-    filenames = ['*.lgt', '*.logtalk']
-    mimetypes = ['text/x-logtalk']
-
-    tokens = {
-        'root': [
-            # Directives
-            (r'^\s*:-\s', Punctuation, 'directive'),
-            # Comments
-            (r'%.*?\n', Comment),
-            (r'/\*(.|\n)*?\*/', Comment),
-            # Whitespace
-            (r'\n', Text),
-            (r'\s+', Text),
-            # Numbers
-            (r"0'[\\]?.", Number),
-            (r'0b[01]+', Number.Bin),
-            (r'0o[0-7]+', Number.Oct),
-            (r'0x[0-9a-fA-F]+', Number.Hex),
-            (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
-            # Variables
-            (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
-            # Event handlers
-            (r'(after|before)(?=[(])', Keyword),
-            # Message forwarding handler
-            (r'forward(?=[(])', Keyword),
-            # Execution-context methods
-            (r'(context|parameter|this|se(lf|nder))(?=[(])', Keyword),
-            # Reflection
-            (r'(current_predicate|predicate_property)(?=[(])', Keyword),
-            # DCGs and term expansion
-            (r'(expand_(goal|term)|(goal|term)_expansion|phrase)(?=[(])', Keyword),
-            # Entity
-            (r'(abolish|c(reate|urrent))_(object|protocol|category)(?=[(])', Keyword),
-            (r'(object|protocol|category)_property(?=[(])', Keyword),
-            # Entity relations
-            (r'co(mplements_object|nforms_to_protocol)(?=[(])', Keyword),
-            (r'extends_(object|protocol|category)(?=[(])', Keyword),
-            (r'imp(lements_protocol|orts_category)(?=[(])', Keyword),
-            (r'(instantiat|specializ)es_class(?=[(])', Keyword),
-            # Events
-            (r'(current_event|(abolish|define)_events)(?=[(])', Keyword),
-            # Flags
-            (r'(create|current|set)_logtalk_flag(?=[(])', Keyword),
-            # Compiling, loading, and library paths
-            (r'logtalk_(compile|l(ibrary_path|oad|oad_context)|make(_target_action)?)(?=[(])', Keyword),
-            (r'\blogtalk_make\b', Keyword),
-            # Database
-            (r'(clause|retract(all)?)(?=[(])', Keyword),
-            (r'a(bolish|ssert(a|z))(?=[(])', Keyword),
-            # Control constructs
-            (r'(ca(ll|tch)|throw)(?=[(])', Keyword),
-            (r'(fa(il|lse)|true|(instantiation|system)_error)\b', Keyword),
-            (r'(type|domain|existence|permission|representation|evaluation|resource|syntax)_error(?=[(])', Keyword),
-            # All solutions
-            (r'((bag|set)of|f(ind|or)all)(?=[(])', Keyword),
-            # Multi-threading predicates
-            (r'threaded(_(ca(ll|ncel)|once|ignore|exit|peek|wait|notify))?(?=[(])', Keyword),
-            # Engine predicates
-            (r'threaded_engine(_(create|destroy|self|next|next_reified|yield|post|fetch))?(?=[(])', Keyword),
-            # Term unification
-            (r'(subsumes_term|unify_with_occurs_check)(?=[(])', Keyword),
-            # Term creation and decomposition
-            (r'(functor|arg|copy_term|numbervars|term_variables)(?=[(])', Keyword),
-            # Evaluable functors
-            (r'(div|rem|m(ax|in|od)|abs|sign)(?=[(])', Keyword),
-            (r'float(_(integer|fractional)_part)?(?=[(])', Keyword),
-            (r'(floor|t(an|runcate)|round|ceiling)(?=[(])', Keyword),
-            # Other arithmetic functors
-            (r'(cos|a(cos|sin|tan|tan2)|exp|log|s(in|qrt)|xor)(?=[(])', Keyword),
-            # Term testing
-            (r'(var|atom(ic)?|integer|float|c(allable|ompound)|n(onvar|umber)|ground|acyclic_term)(?=[(])', Keyword),
-            # Term comparison
-            (r'compare(?=[(])', Keyword),
-            # Stream selection and control
-            (r'(curren|se)t_(in|out)put(?=[(])', Keyword),
-            (r'(open|close)(?=[(])', Keyword),
-            (r'flush_output(?=[(])', Keyword),
-            (r'(at_end_of_stream|flush_output)\b', Keyword),
-            (r'(stream_property|at_end_of_stream|set_stream_position)(?=[(])', Keyword),
-            # Character and byte input/output
-            (r'(nl|(get|peek|put)_(byte|c(har|ode)))(?=[(])', Keyword),
-            (r'\bnl\b', Keyword),
-            # Term input/output
-            (r'read(_term)?(?=[(])', Keyword),
-            (r'write(q|_(canonical|term))?(?=[(])', Keyword),
-            (r'(current_)?op(?=[(])', Keyword),
-            (r'(current_)?char_conversion(?=[(])', Keyword),
-            # Atomic term processing
-            (r'atom_(length|c(hars|o(ncat|des)))(?=[(])', Keyword),
-            (r'(char_code|sub_atom)(?=[(])', Keyword),
-            (r'number_c(har|ode)s(?=[(])', Keyword),
-            # Implementation defined hooks functions
-            (r'(se|curren)t_prolog_flag(?=[(])', Keyword),
-            (r'\bhalt\b', Keyword),
-            (r'halt(?=[(])', Keyword),
-            # Message sending operators
-            (r'(::|:|\^\^)', Operator),
-            # External call
-            (r'[{}]', Keyword),
-            # Logic and control
-            (r'(ignore|once)(?=[(])', Keyword),
-            (r'\brepeat\b', Keyword),
-            # Sorting
-            (r'(key)?sort(?=[(])', Keyword),
-            # Bitwise functors
-            (r'(>>|<<|/\\|\\\\|\\)', Operator),
-            # Predicate aliases
-            (r'\bas\b', Operator),
-            # Arithmetic evaluation
-            (r'\bis\b', Keyword),
-            # Arithmetic comparison
-            (r'(=:=|=\\=|<|=<|>=|>)', Operator),
-            # Term creation and decomposition
-            (r'=\.\.', Operator),
-            # Term unification
-            (r'(=|\\=)', Operator),
-            # Term comparison
-            (r'(==|\\==|@=<|@<|@>=|@>)', Operator),
-            # Evaluable functors
-            (r'(//|[-+*/])', Operator),
-            (r'\b(e|pi|div|mod|rem)\b', Operator),
-            # Other arithmetic functors
-            (r'\b\*\*\b', Operator),
-            # DCG rules
-            (r'-->', Operator),
-            # Control constructs
-            (r'([!;]|->)', Operator),
-            # Logic and control
-            (r'\\+', Operator),
-            # Mode operators
-            (r'[?@]', Operator),
-            # Existential quantifier
-            (r'\^', Operator),
-            # Strings
-            (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
-            # Punctuation
-            (r'[()\[\],.|]', Text),
-            # Atoms
-            (r"[a-z][a-zA-Z0-9_]*", Text),
-            (r"'", String, 'quoted_atom'),
-        ],
-
-        'quoted_atom': [
-            (r"''", String),
-            (r"'", String, '#pop'),
-            (r'\\([\\abfnrtv"\']|(x[a-fA-F0-9]+|[0-7]+)\\)', String.Escape),
-            (r"[^\\'\n]+", String),
-            (r'\\', String),
-        ],
-
-        'directive': [
-            # Conditional compilation directives
-            (r'(el)?if(?=[(])', Keyword, 'root'),
-            (r'(e(lse|ndif))(?=[.])', Keyword, 'root'),
-            # Entity directives
-            (r'(category|object|protocol)(?=[(])', Keyword, 'entityrelations'),
-            (r'(end_(category|object|protocol))(?=[.])', Keyword, 'root'),
-            # Predicate scope directives
-            (r'(public|protected|private)(?=[(])', Keyword, 'root'),
-            # Other directives
-            (r'e(n(coding|sure_loaded)|xport)(?=[(])', Keyword, 'root'),
-            (r'in(clude|itialization|fo)(?=[(])', Keyword, 'root'),
-            (r'(built_in|dynamic|synchronized|threaded)(?=[.])', Keyword, 'root'),
-            (r'(alias|d(ynamic|iscontiguous)|m(eta_(non_terminal|predicate)|ode|ultifile)|s(et_(logtalk|prolog)_flag|ynchronized))(?=[(])', Keyword, 'root'),
-            (r'op(?=[(])', Keyword, 'root'),
-            (r'(c(alls|oinductive)|module|reexport|use(s|_module))(?=[(])', Keyword, 'root'),
-            (r'[a-z][a-zA-Z0-9_]*(?=[(])', Text, 'root'),
-            (r'[a-z][a-zA-Z0-9_]*(?=[.])', Text, 'root'),
-        ],
-
-        'entityrelations': [
-            (r'(complements|extends|i(nstantiates|mp(lements|orts))|specializes)(?=[(])', Keyword),
-            # Numbers
-            (r"0'[\\]?.", Number),
-            (r'0b[01]+', Number.Bin),
-            (r'0o[0-7]+', Number.Oct),
-            (r'0x[0-9a-fA-F]+', Number.Hex),
-            (r'\d+\.?\d*((e|E)(\+|-)?\d+)?', Number),
-            # Variables
-            (r'([A-Z_][a-zA-Z0-9_]*)', Name.Variable),
-            # Atoms
-            (r"[a-z][a-zA-Z0-9_]*", Text),
-            (r"'", String, 'quoted_atom'),
-            # Strings
-            (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
-            # End of entity-opening directive
-            (r'([)]\.)', Text, 'root'),
-            # Scope operator
-            (r'(::)', Operator),
-            # Punctuation
-            (r'[()\[\],.|]', Text),
-            # Comments
-            (r'%.*?\n', Comment),
-            (r'/\*(.|\n)*?\*/', Comment),
-            # Whitespace
-            (r'\n', Text),
-            (r'\s+', Text),
-        ]
-    }
-
-    def analyse_text(text):
-        if ':- object(' in text:
-            return 1.0
-        elif ':- protocol(' in text:
-            return 1.0
-        elif ':- category(' in text:
-            return 1.0
-        elif re.search(r'^:-\s[a-z]', text, re.M):
-            return 0.9
-        else:
-            return 0.0
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/promql.py b/venv/lib/python3.11/site-packages/pygments/lexers/promql.py
deleted file mode 100644
index b6d2d66..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/promql.py
+++ /dev/null
@@ -1,175 +0,0 @@
-"""
-    pygments.lexers.promql
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexer for Prometheus Query Language.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, default, words
-from pygments.token import Comment, Keyword, Name, Number, Operator, \
-    Punctuation, String, Whitespace
-
-__all__ = ["PromQLLexer"]
-
-
-class PromQLLexer(RegexLexer):
-    """
-    For PromQL queries.
-
-    For details about the grammar see:
-    https://github.com/prometheus/prometheus/tree/master/promql/parser
-
-    .. versionadded: 2.7
-    """
-
-    name = "PromQL"
-    url = 'https://prometheus.io/docs/prometheus/latest/querying/basics/'
-    aliases = ["promql"]
-    filenames = ["*.promql"]
-
-    base_keywords = (
-        words(
-            (
-                "bool",
-                "by",
-                "group_left",
-                "group_right",
-                "ignoring",
-                "offset",
-                "on",
-                "without",
-            ),
-            suffix=r"\b",
-        ),
-        Keyword,
-    )
-
-    aggregator_keywords = (
-        words(
-            (
-                "sum",
-                "min",
-                "max",
-                "avg",
-                "group",
-                "stddev",
-                "stdvar",
-                "count",
-                "count_values",
-                "bottomk",
-                "topk",
-                "quantile",
-            ),
-            suffix=r"\b",
-        ),
-        Keyword,
-    )
-
-    function_keywords = (
-        words(
-            (
-                "abs",
-                "absent",
-                "absent_over_time",
-                "avg_over_time",
-                "ceil",
-                "changes",
-                "clamp_max",
-                "clamp_min",
-                "count_over_time",
-                "day_of_month",
-                "day_of_week",
-                "days_in_month",
-                "delta",
-                "deriv",
-                "exp",
-                "floor",
-                "histogram_quantile",
-                "holt_winters",
-                "hour",
-                "idelta",
-                "increase",
-                "irate",
-                "label_join",
-                "label_replace",
-                "ln",
-                "log10",
-                "log2",
-                "max_over_time",
-                "min_over_time",
-                "minute",
-                "month",
-                "predict_linear",
-                "quantile_over_time",
-                "rate",
-                "resets",
-                "round",
-                "scalar",
-                "sort",
-                "sort_desc",
-                "sqrt",
-                "stddev_over_time",
-                "stdvar_over_time",
-                "sum_over_time",
-                "time",
-                "timestamp",
-                "vector",
-                "year",
-            ),
-            suffix=r"\b",
-        ),
-        Keyword.Reserved,
-    )
-
-    tokens = {
-        "root": [
-            (r"\n", Whitespace),
-            (r"\s+", Whitespace),
-            (r",", Punctuation),
-            # Keywords
-            base_keywords,
-            aggregator_keywords,
-            function_keywords,
-            # Offsets
-            (r"[1-9][0-9]*[smhdwy]", String),
-            # Numbers
-            (r"-?[0-9]+\.[0-9]+", Number.Float),
-            (r"-?[0-9]+", Number.Integer),
-            # Comments
-            (r"#.*?$", Comment.Single),
-            # Operators
-            (r"(\+|\-|\*|\/|\%|\^)", Operator),
-            (r"==|!=|>=|<=|<|>", Operator),
-            (r"and|or|unless", Operator.Word),
-            # Metrics
-            (r"[_a-zA-Z][a-zA-Z0-9_]+", Name.Variable),
-            # Params
-            (r'(["\'])(.*?)(["\'])', bygroups(Punctuation, String, Punctuation)),
-            # Other states
-            (r"\(", Operator, "function"),
-            (r"\)", Operator),
-            (r"\{", Punctuation, "labels"),
-            (r"\[", Punctuation, "range"),
-        ],
-        "labels": [
-            (r"\}", Punctuation, "#pop"),
-            (r"\n", Whitespace),
-            (r"\s+", Whitespace),
-            (r",", Punctuation),
-            (r'([_a-zA-Z][a-zA-Z0-9_]*?)(\s*?)(=~|!=|=|!~)(\s*?)("|\')(.*?)("|\')',
-             bygroups(Name.Label, Whitespace, Operator, Whitespace,
-                      Punctuation, String, Punctuation)),
-        ],
-        "range": [
-            (r"\]", Punctuation, "#pop"),
-            (r"[1-9][0-9]*[smhdwy]", String),
-        ],
-        "function": [
-            (r"\)", Operator, "#pop"),
-            (r"\(", Operator, "#push"),
-            default("#pop"),
-        ],
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/prql.py b/venv/lib/python3.11/site-packages/pygments/lexers/prql.py
deleted file mode 100644
index 4c2f12e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/prql.py
+++ /dev/null
@@ -1,252 +0,0 @@
-"""
-    pygments.lexers.prql
-    ~~~~~~~~~~~~~~~~~~~~
-
-    Lexer for the PRQL query language.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, combined, words, include, bygroups
-from pygments.token import Comment, Literal, Keyword, Name, Number, Operator, \
-    Punctuation, String, Text, Whitespace
-
-__all__ = ['PrqlLexer']
-
-
-class PrqlLexer(RegexLexer):
-    """
-    For PRQL source code.
-
-    .. versionadded:: 2.17
-
-    grammar: https://github.com/PRQL/prql/tree/main/grammars
-    """
-
-    name = 'PRQL'
-    url = 'https://prql-lang.org/'
-    aliases = ['prql']
-    filenames = ['*.prql']
-    mimetypes = ['application/prql', 'application/x-prql']
-
-    builtinTypes = words((
-        "bool",
-        "int",
-        "int8", "int16", "int32", "int64", "int128",
-        "float",
-        "text",
-        "set"), suffix=r'\b')
-
-    def innerstring_rules(ttype):
-        return [
-            # the new style '{}'.format(...) string formatting
-            (r'\{'
-             r'((\w+)((\.\w+)|(\[[^\]]+\]))*)?'  # field name
-             r'(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?'
-             r'\}', String.Interpol),
-
-            (r'[^\\\'"%{\n]+', ttype),
-            (r'[\'"\\]', ttype),
-            (r'%|(\{{1,2})', ttype)
-        ]
-
-    def fstring_rules(ttype):
-        return [
-            (r'\}', String.Interpol),
-            (r'\{', String.Interpol, 'expr-inside-fstring'),
-            (r'[^\\\'"{}\n]+', ttype),
-            (r'[\'"\\]', ttype),
-        ]
-
-    tokens = {
-        'root': [
-
-            # Comments
-            (r'#!.*', String.Doc),
-            (r'#.*', Comment.Single),
-
-            # Whitespace
-            (r'\s+', Whitespace),
-
-            # Modules
-            (r'^(\s*)(module)(\s*)',
-             bygroups(Whitespace, Keyword.Namespace, Whitespace),
-             'imports'),
-
-            (builtinTypes, Keyword.Type),
-
-            # Main
-            (r'^prql ', Keyword.Reserved),
-
-            ('let', Keyword.Declaration),
-
-            include('keywords'),
-            include('expr'),
-
-            # Transforms
-            (r'^[A-Za-z_][a-zA-Z0-9_]*', Keyword),
-        ],
-        'expr': [
-            # non-raw f-strings
-            ('(f)(""")', bygroups(String.Affix, String.Double),
-             combined('fstringescape', 'tdqf')),
-            ("(f)(''')", bygroups(String.Affix, String.Single),
-             combined('fstringescape', 'tsqf')),
-            ('(f)(")', bygroups(String.Affix, String.Double),
-             combined('fstringescape', 'dqf')),
-            ("(f)(')", bygroups(String.Affix, String.Single),
-             combined('fstringescape', 'sqf')),
-
-            # non-raw s-strings
-            ('(s)(""")', bygroups(String.Affix, String.Double),
-             combined('stringescape', 'tdqf')),
-            ("(s)(''')", bygroups(String.Affix, String.Single),
-             combined('stringescape', 'tsqf')),
-            ('(s)(")', bygroups(String.Affix, String.Double),
-             combined('stringescape', 'dqf')),
-            ("(s)(')", bygroups(String.Affix, String.Single),
-             combined('stringescape', 'sqf')),
-
-            # raw strings
-            ('(?i)(r)(""")',
-             bygroups(String.Affix, String.Double), 'tdqs'),
-            ("(?i)(r)(''')",
-             bygroups(String.Affix, String.Single), 'tsqs'),
-            ('(?i)(r)(")',
-             bygroups(String.Affix, String.Double), 'dqs'),
-            ("(?i)(r)(')",
-             bygroups(String.Affix, String.Single), 'sqs'),
-
-            # non-raw strings
-            ('"""', String.Double, combined('stringescape', 'tdqs')),
-            ("'''", String.Single, combined('stringescape', 'tsqs')),
-            ('"', String.Double, combined('stringescape', 'dqs')),
-            ("'", String.Single, combined('stringescape', 'sqs')),
-
-            # Time and dates
-            (r'@\d{4}-\d{2}-\d{2}T\d{2}(:\d{2})?(:\d{2})?(\.\d{1,6})?(Z|[+-]\d{1,2}(:\d{1,2})?)?', Literal.Date),
-            (r'@\d{4}-\d{2}-\d{2}', Literal.Date),
-            (r'@\d{2}(:\d{2})?(:\d{2})?(\.\d{1,6})?(Z|[+-]\d{1,2}(:\d{1,2})?)?', Literal.Date),
-
-            (r'[^\S\n]+', Text),
-            include('numbers'),
-            (r'->|=>|==|!=|>=|<=|~=|&&|\|\||\?\?|\/\/', Operator),
-            (r'[-~+/*%=<>&^|.@]', Operator),
-            (r'[]{}:(),;[]', Punctuation),
-            include('functions'),
-
-            # Variable Names
-            (r'[A-Za-z_][a-zA-Z0-9_]*', Name.Variable),
-        ],
-        'numbers': [
-            (r'(\d(?:_?\d)*\.(?:\d(?:_?\d)*)?|(?:\d(?:_?\d)*)?\.\d(?:_?\d)*)'
-             r'([eE][+-]?\d(?:_?\d)*)?', Number.Float),
-            (r'\d(?:_?\d)*[eE][+-]?\d(?:_?\d)*j?', Number.Float),
-            (r'0[oO](?:_?[0-7])+', Number.Oct),
-            (r'0[bB](?:_?[01])+', Number.Bin),
-            (r'0[xX](?:_?[a-fA-F0-9])+', Number.Hex),
-            (r'\d(?:_?\d)*', Number.Integer),
-        ],
-        'fstringescape': [
-            include('stringescape'),
-        ],
-        'bytesescape': [
-            (r'\\([\\bfnrt"\']|\n|x[a-fA-F0-9]{2}|[0-7]{1,3})', String.Escape)
-        ],
-        'stringescape': [
-            (r'\\(N\{.*?\}|u\{[a-fA-F0-9]{1,6}\})', String.Escape),
-            include('bytesescape')
-        ],
-        'fstrings-single': fstring_rules(String.Single),
-        'fstrings-double': fstring_rules(String.Double),
-        'strings-single': innerstring_rules(String.Single),
-        'strings-double': innerstring_rules(String.Double),
-        'dqf': [
-            (r'"', String.Double, '#pop'),
-            (r'\\\\|\\"|\\\n', String.Escape),  # included here for raw strings
-            include('fstrings-double')
-        ],
-        'sqf': [
-            (r"'", String.Single, '#pop'),
-            (r"\\\\|\\'|\\\n", String.Escape),  # included here for raw strings
-            include('fstrings-single')
-        ],
-        'dqs': [
-            (r'"', String.Double, '#pop'),
-            (r'\\\\|\\"|\\\n', String.Escape),  # included here for raw strings
-            include('strings-double')
-        ],
-        'sqs': [
-            (r"'", String.Single, '#pop'),
-            (r"\\\\|\\'|\\\n", String.Escape),  # included here for raw strings
-            include('strings-single')
-        ],
-        'tdqf': [
-            (r'"""', String.Double, '#pop'),
-            include('fstrings-double'),
-            (r'\n', String.Double)
-        ],
-        'tsqf': [
-            (r"'''", String.Single, '#pop'),
-            include('fstrings-single'),
-            (r'\n', String.Single)
-        ],
-        'tdqs': [
-            (r'"""', String.Double, '#pop'),
-            include('strings-double'),
-            (r'\n', String.Double)
-        ],
-        'tsqs': [
-            (r"'''", String.Single, '#pop'),
-            include('strings-single'),
-            (r'\n', String.Single)
-        ],
-
-        'expr-inside-fstring': [
-            (r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
-            # without format specifier
-            (r'(=\s*)?'         # debug (https://bugs.python.org/issue36817)
-             r'\}', String.Interpol, '#pop'),
-            # with format specifier
-            # we'll catch the remaining '}' in the outer scope
-            (r'(=\s*)?'         # debug (https://bugs.python.org/issue36817)
-             r':', String.Interpol, '#pop'),
-            (r'\s+', Whitespace),  # allow new lines
-            include('expr'),
-        ],
-        'expr-inside-fstring-inner': [
-            (r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
-            (r'[])}]', Punctuation, '#pop'),
-            (r'\s+', Whitespace),  # allow new lines
-            include('expr'),
-        ],
-        'keywords': [
-            (words((
-                'into', 'case', 'type', 'module', 'internal',
-            ), suffix=r'\b'),
-                Keyword),
-            (words(('true', 'false', 'null'), suffix=r'\b'), Keyword.Constant),
-        ],
-        'functions': [
-            (words((
-                "min", "max", "sum", "average", "stddev", "every", "any",
-                "concat_array", "count", "lag", "lead", "first", "last",
-                "rank", "rank_dense", "row_number", "round", "as", "in",
-                "tuple_every", "tuple_map", "tuple_zip", "_eq", "_is_null",
-                "from_text", "lower", "upper", "read_parquet", "read_csv"),
-                suffix=r'\b'),
-             Name.Function),
-        ],
-
-        'comment': [
-            (r'-(?!\})', Comment.Multiline),
-            (r'\{-', Comment.Multiline, 'comment'),
-            (r'[^-}]', Comment.Multiline),
-            (r'-\}', Comment.Multiline, '#pop'),
-        ],
-
-        'imports': [
-            (r'\w+(\.\w+)*', Name.Class, '#pop'),
-        ],
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/ptx.py b/venv/lib/python3.11/site-packages/pygments/lexers/ptx.py
deleted file mode 100644
index 218d694..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/ptx.py
+++ /dev/null
@@ -1,120 +0,0 @@
-"""
-    pygments.lexers.ptx
-    ~~~~~~~~~~~~~~~~~~~
-
-    Lexer for other PTX language.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, words
-from pygments.token import Comment, Keyword, Name, String, Number, \
-    Punctuation, Whitespace, Operator
-
-__all__ = ["PtxLexer"]
-
-
-class PtxLexer(RegexLexer):
-    """
-    For NVIDIA `PTX `_
-    source.
-
-    .. versionadded:: 2.16
-    """
-    name = 'PTX'
-    url = "https://docs.nvidia.com/cuda/parallel-thread-execution/"
-    filenames = ['*.ptx']
-    aliases = ['ptx']
-    mimetypes = ['text/x-ptx']
-
-    #: optional Comment or Whitespace
-    string = r'"[^"]*?"'
-    followsym = r'[a-zA-Z0-9_$]'
-    identifier = r'([-a-zA-Z$._][\w\-$.]*|' + string + ')'
-    block_label = r'(' + identifier + r'|(\d+))'
-
-    tokens = {
-        'root': [
-            include('whitespace'),
-
-            (block_label + r'\s*:', Name.Label),
-
-            include('keyword'),
-
-            (r'%' + identifier, Name.Variable),
-            (r'%\d+', Name.Variable.Anonymous),
-            (r'c?' + string, String),
-            (identifier, Name.Variable),
-            (r';', Punctuation),
-            (r'[*+-/]', Operator),
-
-            (r'0[xX][a-fA-F0-9]+', Number),
-            (r'-?\d+(?:[.]\d+)?(?:[eE][-+]?\d+(?:[.]\d+)?)?', Number),
-
-            (r'[=<>{}\[\]()*.,!]|x\b', Punctuation)
-
-        ],
-        'whitespace': [
-            (r'(\n|\s+)+', Whitespace),
-            (r'//.*?\n', Comment)
-        ],
-
-        'keyword': [
-            # Instruction keywords
-            (words((
-                'abs', 'discard', 'min', 'shf', 'vadd',
-                'activemask', 'div', 'mma', 'shfl', 'vadd2',
-                'add', 'dp2a', 'mov', 'shl', 'vadd4',
-                'addc', 'dp4a', 'movmatrix', 'shr', 'vavrg2',
-                'alloca', 'elect', 'mul', 'sin', 'vavrg4',
-                'and', 'ex2', 'mul24', 'slct', 'vmad',
-                'applypriority', 'exit', 'multimem', 'sqrt', 'vmax',
-                'atom', 'fence', 'nanosleep', 'st', 'vmax2',
-                'bar', 'fma', 'neg', 'stackrestore', 'vmax4',
-                'barrier', 'fns', 'not', 'stacksave', 'vmin',
-                'bfe', 'getctarank', 'or', 'stmatrix', 'vmin2',
-                'bfi', 'griddepcontrol', 'pmevent', 'sub', 'vmin4',
-                'bfind', 'isspacep', 'popc', 'subc', 'vote',
-                'bmsk', 'istypep', 'prefetch', 'suld', 'vset',
-                'bra', 'ld', 'prefetchu', 'suq', 'vset2',
-                'brev', 'ldmatrix', 'prmt', 'sured', 'vset4',
-                'brkpt', 'ldu', 'rcp', 'sust', 'vshl',
-                'brx', 'lg2', 'red', 'szext', 'vshr',
-                'call', 'lop3', 'redux', 'tanh', 'vsub',
-                'clz', 'mad', 'rem', 'testp', 'vsub2',
-                'cnot', 'mad24', 'ret', 'tex', 'vsub4',
-                'copysign', 'madc', 'rsqrt', 'tld4', 'wgmma',
-                'cos', 'mapa', 'sad', 'trap', 'wmma',
-                'cp', 'match', 'selp', 'txq', 'xor',
-                'createpolicy', 'max', 'set', 'vabsdiff', 'cvt',
-                'mbarrier', 'setmaxnreg', 'vabsdiff2', 'cvta',
-                'membar', 'setp', 'vabsdiff4')), Keyword),
-            # State Spaces and Suffixes
-            (words((
-                'reg', '.sreg', '.const', '.global',
-                '.local', '.param', '.shared', '.tex',
-                '.wide', '.loc'
-            )), Keyword.Pseudo),
-            # PTX Directives
-            (words((
-                '.address_size', '.explicitcluster', '.maxnreg', '.section',
-                '.alias', '.extern', '.maxntid', '.shared',
-                '.align', '.file', '.minnctapersm', '.sreg',
-                '.branchtargets', '.func', '.noreturn', '.target',
-                '.callprototype', '.global', '.param', '.tex',
-                '.calltargets', '.loc', '.pragma', '.version',
-                '.common', '.local', '.reg', '.visible',
-                '.const', '.maxclusterrank', '.reqnctapercluster', '.weak',
-                '.entry', '.maxnctapersm', '.reqntid')), Keyword.Reserved),
-            # Fundamental Types
-            (words((
-                '.s8', '.s16', '.s32', '.s64',
-                '.u8', '.u16', '.u32', '.u64',
-                '.f16', '.f16x2', '.f32', '.f64',
-                '.b8', '.b16', '.b32', '.b64',
-                '.pred'
-            )), Keyword.Type)
-        ],
-
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/python.py b/venv/lib/python3.11/site-packages/pygments/lexers/python.py
deleted file mode 100644
index cdb88ab..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/python.py
+++ /dev/null
@@ -1,1198 +0,0 @@
-"""
-    pygments.lexers.python
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for Python and related languages.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-import keyword
-
-from pygments.lexer import DelegatingLexer, Lexer, RegexLexer, include, \
-    bygroups, using, default, words, combined, do_insertions, this, line_re
-from pygments.util import get_bool_opt, shebang_matches
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation, Generic, Other, Error, Whitespace
-from pygments import unistring as uni
-
-__all__ = ['PythonLexer', 'PythonConsoleLexer', 'PythonTracebackLexer',
-           'Python2Lexer', 'Python2TracebackLexer',
-           'CythonLexer', 'DgLexer', 'NumPyLexer']
-
-
-class PythonLexer(RegexLexer):
-    """
-    For Python source code (version 3.x).
-
-    .. versionadded:: 0.10
-
-    .. versionchanged:: 2.5
-       This is now the default ``PythonLexer``.  It is still available as the
-       alias ``Python3Lexer``.
-    """
-
-    name = 'Python'
-    url = 'https://www.python.org'
-    aliases = ['python', 'py', 'sage', 'python3', 'py3', 'bazel', 'starlark']
-    filenames = [
-        '*.py',
-        '*.pyw',
-        # Type stubs
-        '*.pyi',
-        # Jython
-        '*.jy',
-        # Sage
-        '*.sage',
-        # SCons
-        '*.sc',
-        'SConstruct',
-        'SConscript',
-        # Skylark/Starlark (used by Bazel, Buck, and Pants)
-        '*.bzl',
-        'BUCK',
-        'BUILD',
-        'BUILD.bazel',
-        'WORKSPACE',
-        # Twisted Application infrastructure
-        '*.tac',
-    ]
-    mimetypes = ['text/x-python', 'application/x-python',
-                 'text/x-python3', 'application/x-python3']
-
-    uni_name = "[%s][%s]*" % (uni.xid_start, uni.xid_continue)
-
-    def innerstring_rules(ttype):
-        return [
-            # the old style '%s' % (...) string formatting (still valid in Py3)
-            (r'%(\(\w+\))?[-#0 +]*([0-9]+|[*])?(\.([0-9]+|[*]))?'
-             '[hlL]?[E-GXc-giorsaux%]', String.Interpol),
-            # the new style '{}'.format(...) string formatting
-            (r'\{'
-             r'((\w+)((\.\w+)|(\[[^\]]+\]))*)?'  # field name
-             r'(\![sra])?'                       # conversion
-             r'(\:(.?[<>=\^])?[-+ ]?#?0?(\d+)?,?(\.\d+)?[E-GXb-gnosx%]?)?'
-             r'\}', String.Interpol),
-
-            # backslashes, quotes and formatting signs must be parsed one at a time
-            (r'[^\\\'"%{\n]+', ttype),
-            (r'[\'"\\]', ttype),
-            # unhandled string formatting sign
-            (r'%|(\{{1,2})', ttype)
-            # newlines are an error (use "nl" state)
-        ]
-
-    def fstring_rules(ttype):
-        return [
-            # Assuming that a '}' is the closing brace after format specifier.
-            # Sadly, this means that we won't detect syntax error. But it's
-            # more important to parse correct syntax correctly, than to
-            # highlight invalid syntax.
-            (r'\}', String.Interpol),
-            (r'\{', String.Interpol, 'expr-inside-fstring'),
-            # backslashes, quotes and formatting signs must be parsed one at a time
-            (r'[^\\\'"{}\n]+', ttype),
-            (r'[\'"\\]', ttype),
-            # newlines are an error (use "nl" state)
-        ]
-
-    tokens = {
-        'root': [
-            (r'\n', Whitespace),
-            (r'^(\s*)([rRuUbB]{,2})("""(?:.|\n)*?""")',
-             bygroups(Whitespace, String.Affix, String.Doc)),
-            (r"^(\s*)([rRuUbB]{,2})('''(?:.|\n)*?''')",
-             bygroups(Whitespace, String.Affix, String.Doc)),
-            (r'\A#!.+$', Comment.Hashbang),
-            (r'#.*$', Comment.Single),
-            (r'\\\n', Text),
-            (r'\\', Text),
-            include('keywords'),
-            include('soft-keywords'),
-            (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'),
-            (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'),
-            (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
-             'fromimport'),
-            (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
-             'import'),
-            include('expr'),
-        ],
-        'expr': [
-            # raw f-strings
-            ('(?i)(rf|fr)(""")',
-             bygroups(String.Affix, String.Double),
-             combined('rfstringescape', 'tdqf')),
-            ("(?i)(rf|fr)(''')",
-             bygroups(String.Affix, String.Single),
-             combined('rfstringescape', 'tsqf')),
-            ('(?i)(rf|fr)(")',
-             bygroups(String.Affix, String.Double),
-             combined('rfstringescape', 'dqf')),
-            ("(?i)(rf|fr)(')",
-             bygroups(String.Affix, String.Single),
-             combined('rfstringescape', 'sqf')),
-            # non-raw f-strings
-            ('([fF])(""")', bygroups(String.Affix, String.Double),
-             combined('fstringescape', 'tdqf')),
-            ("([fF])(''')", bygroups(String.Affix, String.Single),
-             combined('fstringescape', 'tsqf')),
-            ('([fF])(")', bygroups(String.Affix, String.Double),
-             combined('fstringescape', 'dqf')),
-            ("([fF])(')", bygroups(String.Affix, String.Single),
-             combined('fstringescape', 'sqf')),
-            # raw bytes and strings
-            ('(?i)(rb|br|r)(""")',
-             bygroups(String.Affix, String.Double), 'tdqs'),
-            ("(?i)(rb|br|r)(''')",
-             bygroups(String.Affix, String.Single), 'tsqs'),
-            ('(?i)(rb|br|r)(")',
-             bygroups(String.Affix, String.Double), 'dqs'),
-            ("(?i)(rb|br|r)(')",
-             bygroups(String.Affix, String.Single), 'sqs'),
-            # non-raw strings
-            ('([uU]?)(""")', bygroups(String.Affix, String.Double),
-             combined('stringescape', 'tdqs')),
-            ("([uU]?)(''')", bygroups(String.Affix, String.Single),
-             combined('stringescape', 'tsqs')),
-            ('([uU]?)(")', bygroups(String.Affix, String.Double),
-             combined('stringescape', 'dqs')),
-            ("([uU]?)(')", bygroups(String.Affix, String.Single),
-             combined('stringescape', 'sqs')),
-            # non-raw bytes
-            ('([bB])(""")', bygroups(String.Affix, String.Double),
-             combined('bytesescape', 'tdqs')),
-            ("([bB])(''')", bygroups(String.Affix, String.Single),
-             combined('bytesescape', 'tsqs')),
-            ('([bB])(")', bygroups(String.Affix, String.Double),
-             combined('bytesescape', 'dqs')),
-            ("([bB])(')", bygroups(String.Affix, String.Single),
-             combined('bytesescape', 'sqs')),
-
-            (r'[^\S\n]+', Text),
-            include('numbers'),
-            (r'!=|==|<<|>>|:=|[-~+/*%=<>&^|.]', Operator),
-            (r'[]{}:(),;[]', Punctuation),
-            (r'(in|is|and|or|not)\b', Operator.Word),
-            include('expr-keywords'),
-            include('builtins'),
-            include('magicfuncs'),
-            include('magicvars'),
-            include('name'),
-        ],
-        'expr-inside-fstring': [
-            (r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
-            # without format specifier
-            (r'(=\s*)?'         # debug (https://bugs.python.org/issue36817)
-             r'(\![sraf])?'     # conversion
-             r'\}', String.Interpol, '#pop'),
-            # with format specifier
-            # we'll catch the remaining '}' in the outer scope
-            (r'(=\s*)?'         # debug (https://bugs.python.org/issue36817)
-             r'(\![sraf])?'     # conversion
-             r':', String.Interpol, '#pop'),
-            (r'\s+', Whitespace),  # allow new lines
-            include('expr'),
-        ],
-        'expr-inside-fstring-inner': [
-            (r'[{([]', Punctuation, 'expr-inside-fstring-inner'),
-            (r'[])}]', Punctuation, '#pop'),
-            (r'\s+', Whitespace),  # allow new lines
-            include('expr'),
-        ],
-        'expr-keywords': [
-            # Based on https://docs.python.org/3/reference/expressions.html
-            (words((
-                'async for', 'await', 'else', 'for', 'if', 'lambda',
-                'yield', 'yield from'), suffix=r'\b'),
-             Keyword),
-            (words(('True', 'False', 'None'), suffix=r'\b'), Keyword.Constant),
-        ],
-        'keywords': [
-            (words((
-                'assert', 'async', 'await', 'break', 'continue', 'del', 'elif',
-                'else', 'except', 'finally', 'for', 'global', 'if', 'lambda',
-                'pass', 'raise', 'nonlocal', 'return', 'try', 'while', 'yield',
-                'yield from', 'as', 'with'), suffix=r'\b'),
-             Keyword),
-            (words(('True', 'False', 'None'), suffix=r'\b'), Keyword.Constant),
-        ],
-        'soft-keywords': [
-            # `match`, `case` and `_` soft keywords
-            (r'(^[ \t]*)'              # at beginning of line + possible indentation
-             r'(match|case)\b'         # a possible keyword
-             r'(?![ \t]*(?:'           # not followed by...
-             r'[:,;=^&|@~)\]}]|(?:' +  # characters and keywords that mean this isn't
-             r'|'.join(keyword.kwlist) + r')\b))',                 # pattern matching
-             bygroups(Text, Keyword), 'soft-keywords-inner'),
-        ],
-        'soft-keywords-inner': [
-            # optional `_` keyword
-            (r'(\s+)([^\n_]*)(_\b)', bygroups(Whitespace, using(this), Keyword)),
-            default('#pop')
-        ],
-        'builtins': [
-            (words((
-                '__import__', 'abs', 'aiter', 'all', 'any', 'bin', 'bool', 'bytearray',
-                'breakpoint', 'bytes', 'callable', 'chr', 'classmethod', 'compile',
-                'complex', 'delattr', 'dict', 'dir', 'divmod', 'enumerate', 'eval',
-                'filter', 'float', 'format', 'frozenset', 'getattr', 'globals',
-                'hasattr', 'hash', 'hex', 'id', 'input', 'int', 'isinstance',
-                'issubclass', 'iter', 'len', 'list', 'locals', 'map', 'max',
-                'memoryview', 'min', 'next', 'object', 'oct', 'open', 'ord', 'pow',
-                'print', 'property', 'range', 'repr', 'reversed', 'round', 'set',
-                'setattr', 'slice', 'sorted', 'staticmethod', 'str', 'sum', 'super',
-                'tuple', 'type', 'vars', 'zip'), prefix=r'(?>|[-~+/*%=<>&^|.]', Operator),
-            include('keywords'),
-            (r'(def)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'funcname'),
-            (r'(class)((?:\s|\\\s)+)', bygroups(Keyword, Text), 'classname'),
-            (r'(from)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
-             'fromimport'),
-            (r'(import)((?:\s|\\\s)+)', bygroups(Keyword.Namespace, Text),
-             'import'),
-            include('builtins'),
-            include('magicfuncs'),
-            include('magicvars'),
-            include('backtick'),
-            ('([rR]|[uUbB][rR]|[rR][uUbB])(""")',
-             bygroups(String.Affix, String.Double), 'tdqs'),
-            ("([rR]|[uUbB][rR]|[rR][uUbB])(''')",
-             bygroups(String.Affix, String.Single), 'tsqs'),
-            ('([rR]|[uUbB][rR]|[rR][uUbB])(")',
-             bygroups(String.Affix, String.Double), 'dqs'),
-            ("([rR]|[uUbB][rR]|[rR][uUbB])(')",
-             bygroups(String.Affix, String.Single), 'sqs'),
-            ('([uUbB]?)(""")', bygroups(String.Affix, String.Double),
-             combined('stringescape', 'tdqs')),
-            ("([uUbB]?)(''')", bygroups(String.Affix, String.Single),
-             combined('stringescape', 'tsqs')),
-            ('([uUbB]?)(")', bygroups(String.Affix, String.Double),
-             combined('stringescape', 'dqs')),
-            ("([uUbB]?)(')", bygroups(String.Affix, String.Single),
-             combined('stringescape', 'sqs')),
-            include('name'),
-            include('numbers'),
-        ],
-        'keywords': [
-            (words((
-                'assert', 'break', 'continue', 'del', 'elif', 'else', 'except',
-                'exec', 'finally', 'for', 'global', 'if', 'lambda', 'pass',
-                'print', 'raise', 'return', 'try', 'while', 'yield',
-                'yield from', 'as', 'with'), suffix=r'\b'),
-             Keyword),
-        ],
-        'builtins': [
-            (words((
-                '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin',
-                'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr', 'classmethod',
-                'cmp', 'coerce', 'compile', 'complex', 'delattr', 'dict', 'dir', 'divmod',
-                'enumerate', 'eval', 'execfile', 'exit', 'file', 'filter', 'float',
-                'frozenset', 'getattr', 'globals', 'hasattr', 'hash', 'hex', 'id',
-                'input', 'int', 'intern', 'isinstance', 'issubclass', 'iter', 'len',
-                'list', 'locals', 'long', 'map', 'max', 'min', 'next', 'object',
-                'oct', 'open', 'ord', 'pow', 'property', 'range', 'raw_input', 'reduce',
-                'reload', 'repr', 'reversed', 'round', 'set', 'setattr', 'slice',
-                'sorted', 'staticmethod', 'str', 'sum', 'super', 'tuple', 'type',
-                'unichr', 'unicode', 'vars', 'xrange', 'zip'),
-                prefix=r'(?>> )(.*\n)', bygroups(Generic.Prompt, Other.Code), 'continuations'),
-            # This happens, e.g., when tracebacks are embedded in documentation;
-            # trailing whitespaces are often stripped in such contexts.
-            (r'(>>>)(\n)', bygroups(Generic.Prompt, Whitespace)),
-            (r'(\^C)?Traceback \(most recent call last\):\n', Other.Traceback, 'traceback'),
-            # SyntaxError starts with this
-            (r'  File "[^"]+", line \d+', Other.Traceback, 'traceback'),
-            (r'.*\n', Generic.Output),
-        ],
-        'continuations': [
-            (r'(\.\.\. )(.*\n)', bygroups(Generic.Prompt, Other.Code)),
-            # See above.
-            (r'(\.\.\.)(\n)', bygroups(Generic.Prompt, Whitespace)),
-            default('#pop'),
-        ],
-        'traceback': [
-            # As soon as we see a traceback, consume everything until the next
-            # >>> prompt.
-            (r'(?=>>>( |$))', Text, '#pop'),
-            (r'(KeyboardInterrupt)(\n)', bygroups(Name.Class, Whitespace)),
-            (r'.*\n', Other.Traceback),
-        ],
-    }
-
-class PythonConsoleLexer(DelegatingLexer):
-    """
-    For Python console output or doctests, such as:
-
-    .. sourcecode:: pycon
-
-        >>> a = 'foo'
-        >>> print(a)
-        foo
-        >>> 1 / 0
-        Traceback (most recent call last):
-          File "", line 1, in 
-        ZeroDivisionError: integer division or modulo by zero
-
-    Additional options:
-
-    `python3`
-        Use Python 3 lexer for code.  Default is ``True``.
-
-        .. versionadded:: 1.0
-        .. versionchanged:: 2.5
-           Now defaults to ``True``.
-    """
-
-    name = 'Python console session'
-    aliases = ['pycon']
-    mimetypes = ['text/x-python-doctest']
-
-    def __init__(self, **options):
-        python3 = get_bool_opt(options, 'python3', True)
-        if python3:
-            pylexer = PythonLexer
-            tblexer = PythonTracebackLexer
-        else:
-            pylexer = Python2Lexer
-            tblexer = Python2TracebackLexer
-        # We have two auxiliary lexers. Use DelegatingLexer twice with
-        # different tokens.  TODO: DelegatingLexer should support this
-        # directly, by accepting a tuplet of auxiliary lexers and a tuple of
-        # distinguishing tokens. Then we wouldn't need this intermediary
-        # class.
-        class _ReplaceInnerCode(DelegatingLexer):
-            def __init__(self, **options):
-                super().__init__(pylexer, _PythonConsoleLexerBase, Other.Code, **options)
-        super().__init__(tblexer, _ReplaceInnerCode, Other.Traceback, **options)
-
-class PythonTracebackLexer(RegexLexer):
-    """
-    For Python 3.x tracebacks, with support for chained exceptions.
-
-    .. versionadded:: 1.0
-
-    .. versionchanged:: 2.5
-       This is now the default ``PythonTracebackLexer``.  It is still available
-       as the alias ``Python3TracebackLexer``.
-    """
-
-    name = 'Python Traceback'
-    aliases = ['pytb', 'py3tb']
-    filenames = ['*.pytb', '*.py3tb']
-    mimetypes = ['text/x-python-traceback', 'text/x-python3-traceback']
-
-    tokens = {
-        'root': [
-            (r'\n', Whitespace),
-            (r'^(\^C)?Traceback \(most recent call last\):\n', Generic.Traceback, 'intb'),
-            (r'^During handling of the above exception, another '
-             r'exception occurred:\n\n', Generic.Traceback),
-            (r'^The above exception was the direct cause of the '
-             r'following exception:\n\n', Generic.Traceback),
-            (r'^(?=  File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
-            (r'^.*\n', Other),
-        ],
-        'intb': [
-            (r'^(  File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
-             bygroups(Text, Name.Builtin, Text, Number, Text, Name, Whitespace)),
-            (r'^(  File )("[^"]+")(, line )(\d+)(\n)',
-             bygroups(Text, Name.Builtin, Text, Number, Whitespace)),
-            (r'^(    )(.+)(\n)',
-             bygroups(Whitespace, using(PythonLexer), Whitespace), 'markers'),
-            (r'^([ \t]*)(\.\.\.)(\n)',
-             bygroups(Whitespace, Comment, Whitespace)),  # for doctests...
-            (r'^([^:]+)(: )(.+)(\n)',
-             bygroups(Generic.Error, Text, Name, Whitespace), '#pop'),
-            (r'^([a-zA-Z_][\w.]*)(:?\n)',
-             bygroups(Generic.Error, Whitespace), '#pop'),
-            default('#pop'),
-        ],
-        'markers': [
-            # Either `PEP 657 `
-            # error locations in Python 3.11+, or single-caret markers
-            # for syntax errors before that.
-            (r'^( {4,})([~^]+)(\n)',
-             bygroups(Whitespace, Punctuation.Marker, Whitespace),
-             '#pop'),
-            default('#pop'),
-        ],
-    }
-
-
-Python3TracebackLexer = PythonTracebackLexer
-
-
-class Python2TracebackLexer(RegexLexer):
-    """
-    For Python tracebacks.
-
-    .. versionadded:: 0.7
-
-    .. versionchanged:: 2.5
-       This class has been renamed from ``PythonTracebackLexer``.
-       ``PythonTracebackLexer`` now refers to the Python 3 variant.
-    """
-
-    name = 'Python 2.x Traceback'
-    aliases = ['py2tb']
-    filenames = ['*.py2tb']
-    mimetypes = ['text/x-python2-traceback']
-
-    tokens = {
-        'root': [
-            # Cover both (most recent call last) and (innermost last)
-            # The optional ^C allows us to catch keyboard interrupt signals.
-            (r'^(\^C)?(Traceback.*\n)',
-             bygroups(Text, Generic.Traceback), 'intb'),
-            # SyntaxError starts with this.
-            (r'^(?=  File "[^"]+", line \d+)', Generic.Traceback, 'intb'),
-            (r'^.*\n', Other),
-        ],
-        'intb': [
-            (r'^(  File )("[^"]+")(, line )(\d+)(, in )(.+)(\n)',
-             bygroups(Text, Name.Builtin, Text, Number, Text, Name, Whitespace)),
-            (r'^(  File )("[^"]+")(, line )(\d+)(\n)',
-             bygroups(Text, Name.Builtin, Text, Number, Whitespace)),
-            (r'^(    )(.+)(\n)',
-             bygroups(Text, using(Python2Lexer), Whitespace), 'marker'),
-            (r'^([ \t]*)(\.\.\.)(\n)',
-             bygroups(Text, Comment, Whitespace)),  # for doctests...
-            (r'^([^:]+)(: )(.+)(\n)',
-             bygroups(Generic.Error, Text, Name, Whitespace), '#pop'),
-            (r'^([a-zA-Z_]\w*)(:?\n)',
-             bygroups(Generic.Error, Whitespace), '#pop')
-        ],
-        'marker': [
-            # For syntax errors.
-            (r'( {4,})(\^)', bygroups(Text, Punctuation.Marker), '#pop'),
-            default('#pop'),
-        ],
-    }
-
-
-class CythonLexer(RegexLexer):
-    """
-    For Pyrex and Cython source code.
-
-    .. versionadded:: 1.1
-    """
-
-    name = 'Cython'
-    url = 'https://cython.org'
-    aliases = ['cython', 'pyx', 'pyrex']
-    filenames = ['*.pyx', '*.pxd', '*.pxi']
-    mimetypes = ['text/x-cython', 'application/x-cython']
-
-    tokens = {
-        'root': [
-            (r'\n', Whitespace),
-            (r'^(\s*)("""(?:.|\n)*?""")', bygroups(Whitespace, String.Doc)),
-            (r"^(\s*)('''(?:.|\n)*?''')", bygroups(Whitespace, String.Doc)),
-            (r'[^\S\n]+', Text),
-            (r'#.*$', Comment),
-            (r'[]{}:(),;[]', Punctuation),
-            (r'\\\n', Whitespace),
-            (r'\\', Text),
-            (r'(in|is|and|or|not)\b', Operator.Word),
-            (r'(<)([a-zA-Z0-9.?]+)(>)',
-             bygroups(Punctuation, Keyword.Type, Punctuation)),
-            (r'!=|==|<<|>>|[-~+/*%=<>&^|.?]', Operator),
-            (r'(from)(\d+)(<=)(\s+)(<)(\d+)(:)',
-             bygroups(Keyword, Number.Integer, Operator, Name, Operator,
-                      Name, Punctuation)),
-            include('keywords'),
-            (r'(def|property)(\s+)', bygroups(Keyword, Text), 'funcname'),
-            (r'(cp?def)(\s+)', bygroups(Keyword, Text), 'cdef'),
-            # (should actually start a block with only cdefs)
-            (r'(cdef)(:)', bygroups(Keyword, Punctuation)),
-            (r'(class|struct)(\s+)', bygroups(Keyword, Text), 'classname'),
-            (r'(from)(\s+)', bygroups(Keyword, Text), 'fromimport'),
-            (r'(c?import)(\s+)', bygroups(Keyword, Text), 'import'),
-            include('builtins'),
-            include('backtick'),
-            ('(?:[rR]|[uU][rR]|[rR][uU])"""', String, 'tdqs'),
-            ("(?:[rR]|[uU][rR]|[rR][uU])'''", String, 'tsqs'),
-            ('(?:[rR]|[uU][rR]|[rR][uU])"', String, 'dqs'),
-            ("(?:[rR]|[uU][rR]|[rR][uU])'", String, 'sqs'),
-            ('[uU]?"""', String, combined('stringescape', 'tdqs')),
-            ("[uU]?'''", String, combined('stringescape', 'tsqs')),
-            ('[uU]?"', String, combined('stringescape', 'dqs')),
-            ("[uU]?'", String, combined('stringescape', 'sqs')),
-            include('name'),
-            include('numbers'),
-        ],
-        'keywords': [
-            (words((
-                'assert', 'async', 'await', 'break', 'by', 'continue', 'ctypedef', 'del', 'elif',
-                'else', 'except', 'except?', 'exec', 'finally', 'for', 'fused', 'gil',
-                'global', 'if', 'include', 'lambda', 'nogil', 'pass', 'print',
-                'raise', 'return', 'try', 'while', 'yield', 'as', 'with'), suffix=r'\b'),
-             Keyword),
-            (r'(DEF|IF|ELIF|ELSE)\b', Comment.Preproc),
-        ],
-        'builtins': [
-            (words((
-                '__import__', 'abs', 'all', 'any', 'apply', 'basestring', 'bin', 'bint',
-                'bool', 'buffer', 'bytearray', 'bytes', 'callable', 'chr',
-                'classmethod', 'cmp', 'coerce', 'compile', 'complex', 'delattr',
-                'dict', 'dir', 'divmod', 'enumerate', 'eval', 'execfile', 'exit',
-                'file', 'filter', 'float', 'frozenset', 'getattr', 'globals',
-                'hasattr', 'hash', 'hex', 'id', 'input', 'int', 'intern', 'isinstance',
-                'issubclass', 'iter', 'len', 'list', 'locals', 'long', 'map', 'max',
-                'min', 'next', 'object', 'oct', 'open', 'ord', 'pow', 'property', 'Py_ssize_t',
-                'range', 'raw_input', 'reduce', 'reload', 'repr', 'reversed',
-                'round', 'set', 'setattr', 'slice', 'sorted', 'staticmethod',
-                'str', 'sum', 'super', 'tuple', 'type', 'unichr', 'unicode', 'unsigned',
-                'vars', 'xrange', 'zip'), prefix=r'(?`_ source code.
-
-    .. versionadded:: 2.12
-    """
-
-    name = "K"
-    aliases = ["k"]
-    filenames = ["*.k"]
-
-    tokens = {
-        "whitespace": [
-            # hashbang script
-            (r"^#!.*", Comment.Hashbang),
-            # Comments
-            (r"^/\s*\n", Comment.Multiline, "comments"),
-            (r"(??/\\:']?:)(\s*)(\{)",
-             bygroups(Name.Function, Whitespace, Operator, Whitespace, Punctuation),
-             "functions"),
-            # Variable Names
-            (r"([.]?[a-zA-Z][\w.]*)(\s*)([-.~=!@#$%^&*_+|,<>?/\\:']?:)",
-             bygroups(Name.Variable, Whitespace, Operator)),
-            # Functions
-            (r"\{", Punctuation, "functions"),
-            # Parentheses
-            (r"\(", Punctuation, "parentheses"),
-            # Brackets
-            (r"\[", Punctuation, "brackets"),
-            # Errors
-            (r"'`([a-zA-Z][\w.]*)?", Name.Exception),
-            # File Symbols
-            (r"`:([a-zA-Z/][\w./]*)?", String.Symbol),
-            # Symbols
-            (r"`([a-zA-Z][\w.]*)?", String.Symbol),
-            # Numbers
-            include("numbers"),
-            # Variable Names
-            (r"[a-zA-Z][\w.]*", Name),
-            # Operators
-            (r"[-=+*#$%@!~^&:.,<>'\\|/?_]", Operator),
-            # Punctuation
-            (r";", Punctuation),
-        ],
-        "functions": [
-            include("root"),
-            (r"\}", Punctuation, "#pop"),
-        ],
-        "parentheses": [
-            include("root"),
-            (r"\)", Punctuation, "#pop"),
-        ],
-        "brackets": [
-            include("root"),
-            (r"\]", Punctuation, "#pop"),
-        ],
-        "numbers": [
-            # Binary Values
-            (r"[01]+b", Number.Bin),
-            # Nulls/Infinities
-            (r"0[nNwW][cefghijmndzuvtp]?", Number),
-            # Timestamps
-            ((r"(?:[0-9]{4}[.][0-9]{2}[.][0-9]{2}|[0-9]+)"
-              "D(?:[0-9](?:[0-9](?::[0-9]{2}"
-              "(?::[0-9]{2}(?:[.][0-9]*)?)?)?)?)?"), Literal.Date),
-            # Datetimes
-            ((r"[0-9]{4}[.][0-9]{2}"
-              "(?:m|[.][0-9]{2}(?:T(?:[0-9]{2}:[0-9]{2}"
-              "(?::[0-9]{2}(?:[.][0-9]*)?)?)?)?)"), Literal.Date),
-            # Times
-            (r"[0-9]{2}:[0-9]{2}(?::[0-9]{2}(?:[.][0-9]{1,3})?)?",
-             Literal.Date),
-            # GUIDs
-            (r"[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}",
-             Number.Hex),
-            # Byte Vectors
-            (r"0x[0-9a-fA-F]+", Number.Hex),
-            # Floats
-            (r"([0-9]*[.]?[0-9]+|[0-9]+[.]?[0-9]*)[eE][+-]?[0-9]+[ef]?",
-             Number.Float),
-            (r"([0-9]*[.][0-9]+|[0-9]+[.][0-9]*)[ef]?", Number.Float),
-            (r"[0-9]+[ef]", Number.Float),
-            # Characters
-            (r"[0-9]+c", Number),
-            # Integers
-            (r"[0-9]+[ihtuv]", Number.Integer),
-            # Long Integers
-            (r"[0-9]+[jnp]?", Number.Integer.Long),
-        ],
-        "comments": [
-            (r"[^\\]+", Comment.Multiline),
-            (r"^\\", Comment.Multiline, "#pop"),
-            (r"\\", Comment.Multiline),
-        ],
-        "strings": [
-            (r'[^"\\]+', String.Double),
-            (r"\\.", String.Escape),
-            (r'"', String.Double, "#pop"),
-        ],
-    }
-
-
-class QLexer(KLexer):
-    """
-    For `Q `_ source code.
-
-    .. versionadded:: 2.12
-    """
-
-    name = "Q"
-    aliases = ["q"]
-    filenames = ["*.q"]
-
-    tokens = {
-        "root": [
-            (words(("aj", "aj0", "ajf", "ajf0", "all", "and", "any", "asc",
-                    "asof", "attr", "avgs", "ceiling", "cols", "count", "cross",
-                    "csv", "cut", "deltas", "desc", "differ", "distinct", "dsave",
-                    "each", "ej", "ema", "eval", "except", "fby", "fills", "first",
-                    "fkeys", "flip", "floor", "get", "group", "gtime", "hclose",
-                    "hcount", "hdel", "hsym", "iasc", "idesc", "ij", "ijf",
-                    "inter", "inv", "key", "keys", "lj", "ljf", "load", "lower",
-                    "lsq", "ltime", "ltrim", "mavg", "maxs", "mcount", "md5",
-                    "mdev", "med", "meta", "mins", "mmax", "mmin", "mmu", "mod",
-                    "msum", "neg", "next", "not", "null", "or", "over", "parse",
-                    "peach", "pj", "prds", "prior", "prev", "rand", "rank", "ratios",
-                    "raze", "read0", "read1", "reciprocal", "reval", "reverse",
-                    "rload", "rotate", "rsave", "rtrim", "save", "scan", "scov",
-                    "sdev", "set", "show", "signum", "ssr", "string", "sublist",
-                    "sums", "sv", "svar", "system", "tables", "til", "trim", "txf",
-                    "type", "uj", "ujf", "ungroup", "union", "upper", "upsert",
-                    "value", "view", "views", "vs", "where", "wj", "wj1", "ww",
-                    "xasc", "xbar", "xcol", "xcols", "xdesc", "xgroup", "xkey",
-                    "xlog", "xprev", "xrank"),
-                    suffix=r"\b"), Name.Builtin,
-            ),
-            inherit,
-        ],
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/qlik.py b/venv/lib/python3.11/site-packages/pygments/lexers/qlik.py
deleted file mode 100644
index b265b60..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/qlik.py
+++ /dev/null
@@ -1,117 +0,0 @@
-"""
-    pygments.lexers.qlik
-    ~~~~~~~~~~~~~~~~~~~~
-
-    Lexer for the qlik scripting language
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, words
-from pygments.token import Comment, Keyword, Name, Number, Operator, \
-    Punctuation, String, Text
-from pygments.lexers._qlik_builtins import OPERATORS_LIST, STATEMENT_LIST, \
-    SCRIPT_FUNCTIONS, CONSTANT_LIST
-
-__all__ = ["QlikLexer"]
-
-
-class QlikLexer(RegexLexer):
-    """
-    Lexer for qlik code, including .qvs files
-
-    .. versionadded:: 2.12
-    """
-
-    name = "Qlik"
-    aliases = ["qlik", "qlikview", "qliksense", "qlikscript"]
-    filenames = ["*.qvs", "*.qvw"]
-
-    flags = re.IGNORECASE
-
-    tokens = {
-        # Handle multi-line comments
-        "comment": [
-            (r"\*/", Comment.Multiline, "#pop"),
-            (r"[^*]+", Comment.Multiline),
-        ],
-        # Handle numbers
-        "numerics": [
-            (r"\b\d+\.\d+(e\d+)?[fd]?\b", Number.Float),
-            (r"\b\d+\b", Number.Integer),
-        ],
-        # Handle variable names in things
-        "interp": [
-            (
-                r"(\$\()(\w+)(\))",
-                bygroups(String.Interpol, Name.Variable, String.Interpol),
-            ),
-        ],
-        # Handle strings
-        "string": [
-            (r"'", String, "#pop"),
-            include("interp"),
-            (r"[^'$]+", String),
-            (r"\$", String),
-        ],
-        #
-        "assignment": [
-            (r";", Punctuation, "#pop"),
-            include("root"),
-        ],
-        "field_name_quote": [
-            (r'"', String.Symbol, "#pop"),
-            include("interp"),
-            (r"[^\"$]+", String.Symbol),
-            (r"\$", String.Symbol),
-        ],
-        "field_name_bracket": [
-            (r"\]", String.Symbol, "#pop"),
-            include("interp"),
-            (r"[^\]$]+", String.Symbol),
-            (r"\$", String.Symbol),
-        ],
-        "function": [(r"\)", Punctuation, "#pop"), include("root")],
-        "root": [
-            # Whitespace and comments
-            (r"\s+", Text.Whitespace),
-            (r"/\*", Comment.Multiline, "comment"),
-            (r"//.*\n", Comment.Single),
-            # variable assignment
-            (r"(let|set)(\s+)", bygroups(Keyword.Declaration, Text.Whitespace),
-             "assignment"),
-            # Word operators
-            (words(OPERATORS_LIST["words"], prefix=r"\b", suffix=r"\b"),
-             Operator.Word),
-            # Statements
-            (words(STATEMENT_LIST, suffix=r"\b"), Keyword),
-            # Table names
-            (r"[a-z]\w*:", Keyword.Declaration),
-            # Constants
-            (words(CONSTANT_LIST, suffix=r"\b"), Keyword.Constant),
-            # Functions
-            (words(SCRIPT_FUNCTIONS, suffix=r"(?=\s*\()"), Name.Builtin,
-             "function"),
-            # interpolation - e.g. $(variableName)
-            include("interp"),
-            # Quotes denote a field/file name
-            (r'"', String.Symbol, "field_name_quote"),
-            # Square brackets denote a field/file name
-            (r"\[", String.Symbol, "field_name_bracket"),
-            # Strings
-            (r"'", String, "string"),
-            # Numbers
-            include("numerics"),
-            # Operator symbols
-            (words(OPERATORS_LIST["symbols"]), Operator),
-            # Strings denoted by single quotes
-            (r"'.+?'", String),
-            # Words as text
-            (r"\b\w+\b", Text),
-            # Basic punctuation
-            (r"[,;.()\\/]", Punctuation),
-        ],
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/qvt.py b/venv/lib/python3.11/site-packages/pygments/lexers/qvt.py
deleted file mode 100644
index dc329f7..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/qvt.py
+++ /dev/null
@@ -1,151 +0,0 @@
-"""
-    pygments.lexers.qvt
-    ~~~~~~~~~~~~~~~~~~~
-
-    Lexer for QVT Operational language.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, include, combined, default, \
-    words
-from pygments.token import Text, Comment, Operator, Keyword, Punctuation, \
-    Name, String, Number
-
-__all__ = ['QVToLexer']
-
-
-class QVToLexer(RegexLexer):
-    """
-    For the `QVT Operational Mapping language `_.
-
-    Reference for implementing this: «Meta Object Facility (MOF) 2.0
-    Query/View/Transformation Specification», Version 1.1 - January 2011
-    (http://www.omg.org/spec/QVT/1.1/), see §8.4, «Concrete Syntax» in
-    particular.
-
-    Notable tokens assignments:
-
-    - Name.Class is assigned to the identifier following any of the following
-      keywords: metamodel, class, exception, primitive, enum, transformation
-      or library
-
-    - Name.Function is assigned to the names of mappings and queries
-
-    - Name.Builtin.Pseudo is assigned to the pre-defined variables 'this',
-      'self' and 'result'.
-    """
-    # With obvious borrowings & inspiration from the Java, Python and C lexers
-
-    name = 'QVTO'
-    aliases = ['qvto', 'qvt']
-    filenames = ['*.qvto']
-
-    tokens = {
-        'root': [
-            (r'\n', Text),
-            (r'[^\S\n]+', Text),
-            (r'(--|//)(\s*)(directive:)?(.*)$',
-             bygroups(Comment, Comment, Comment.Preproc, Comment)),
-            # Uncomment the following if you want to distinguish between
-            # '/*' and '/**', à la javadoc
-            # (r'/[*]{2}(.|\n)*?[*]/', Comment.Multiline),
-            (r'/[*](.|\n)*?[*]/', Comment.Multiline),
-            (r'\\\n', Text),
-            (r'(and|not|or|xor|##?)\b', Operator.Word),
-            (r'(:{1,2}=|[-+]=)\b', Operator.Word),
-            (r'(@|<<|>>)\b', Keyword),  # stereotypes
-            (r'!=|<>|==|=|!->|->|>=|<=|[.]{3}|[+/*%=<>&|.~]', Operator),
-            (r'[]{}:(),;[]', Punctuation),
-            (r'(true|false|unlimited|null)\b', Keyword.Constant),
-            (r'(this|self|result)\b', Name.Builtin.Pseudo),
-            (r'(var)\b', Keyword.Declaration),
-            (r'(from|import)\b', Keyword.Namespace, 'fromimport'),
-            (r'(metamodel|class|exception|primitive|enum|transformation|'
-             r'library)(\s+)(\w+)',
-             bygroups(Keyword.Word, Text, Name.Class)),
-            (r'(exception)(\s+)(\w+)',
-             bygroups(Keyword.Word, Text, Name.Exception)),
-            (r'(main)\b', Name.Function),
-            (r'(mapping|helper|query)(\s+)',
-             bygroups(Keyword.Declaration, Text), 'operation'),
-            (r'(assert)(\s+)\b', bygroups(Keyword, Text), 'assert'),
-            (r'(Bag|Collection|Dict|OrderedSet|Sequence|Set|Tuple|List)\b',
-             Keyword.Type),
-            include('keywords'),
-            ('"', String, combined('stringescape', 'dqs')),
-            ("'", String, combined('stringescape', 'sqs')),
-            include('name'),
-            include('numbers'),
-            # (r'([a-zA-Z_]\w*)(::)([a-zA-Z_]\w*)',
-            # bygroups(Text, Text, Text)),
-        ],
-
-        'fromimport': [
-            (r'(?:[ \t]|\\\n)+', Text),
-            (r'[a-zA-Z_][\w.]*', Name.Namespace),
-            default('#pop'),
-        ],
-
-        'operation': [
-            (r'::', Text),
-            (r'(.*::)([a-zA-Z_]\w*)([ \t]*)(\()',
-             bygroups(Text, Name.Function, Text, Punctuation), '#pop')
-        ],
-
-        'assert': [
-            (r'(warning|error|fatal)\b', Keyword, '#pop'),
-            default('#pop'),  # all else: go back
-        ],
-
-        'keywords': [
-            (words((
-                'abstract', 'access', 'any', 'assert', 'blackbox', 'break',
-                'case', 'collect', 'collectNested', 'collectOne', 'collectselect',
-                'collectselectOne', 'composes', 'compute', 'configuration',
-                'constructor', 'continue', 'datatype', 'default', 'derived',
-                'disjuncts', 'do', 'elif', 'else', 'end', 'endif', 'except',
-                'exists', 'extends', 'forAll', 'forEach', 'forOne', 'from', 'if',
-                'implies', 'in', 'inherits', 'init', 'inout', 'intermediate',
-                'invresolve', 'invresolveIn', 'invresolveone', 'invresolveoneIn',
-                'isUnique', 'iterate', 'late', 'let', 'literal', 'log', 'map',
-                'merges', 'modeltype', 'new', 'object', 'one', 'ordered', 'out',
-                'package', 'population', 'property', 'raise', 'readonly',
-                'references', 'refines', 'reject', 'resolve', 'resolveIn',
-                'resolveone', 'resolveoneIn', 'return', 'select', 'selectOne',
-                'sortedBy', 'static', 'switch', 'tag', 'then', 'try', 'typedef',
-                'unlimited', 'uses', 'when', 'where', 'while', 'with', 'xcollect',
-                'xmap', 'xselect'), suffix=r'\b'), Keyword),
-        ],
-
-        # There is no need to distinguish between String.Single and
-        # String.Double: 'strings' is factorised for 'dqs' and 'sqs'
-        'strings': [
-            (r'[^\\\'"\n]+', String),
-            # quotes, percents and backslashes must be parsed one at a time
-            (r'[\'"\\]', String),
-        ],
-        'stringescape': [
-            (r'\\([\\btnfr"\']|u[0-3][0-7]{2}|u[0-7]{1,2})', String.Escape)
-        ],
-        'dqs': [  # double-quoted string
-            (r'"', String, '#pop'),
-            (r'\\\\|\\"', String.Escape),
-            include('strings')
-        ],
-        'sqs': [  # single-quoted string
-            (r"'", String, '#pop'),
-            (r"\\\\|\\'", String.Escape),
-            include('strings')
-        ],
-        'name': [
-            (r'[a-zA-Z_]\w*', Name),
-        ],
-        # numbers: excerpt taken from the python lexer
-        'numbers': [
-            (r'(\d+\.\d*|\d*\.\d+)([eE][+-]?[0-9]+)?', Number.Float),
-            (r'\d+[eE][+-]?[0-9]+', Number.Float),
-            (r'\d+', Number.Integer)
-        ],
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/r.py b/venv/lib/python3.11/site-packages/pygments/lexers/r.py
deleted file mode 100644
index ed62fa2..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/r.py
+++ /dev/null
@@ -1,190 +0,0 @@
-"""
-    pygments.lexers.r
-    ~~~~~~~~~~~~~~~~~
-
-    Lexers for the R/S languages.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, include, do_insertions
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation, Generic, Whitespace
-
-__all__ = ['RConsoleLexer', 'SLexer', 'RdLexer']
-
-
-line_re  = re.compile('.*?\n')
-
-
-class RConsoleLexer(Lexer):
-    """
-    For R console transcripts or R CMD BATCH output files.
-    """
-
-    name = 'RConsole'
-    aliases = ['rconsole', 'rout']
-    filenames = ['*.Rout']
-
-    def get_tokens_unprocessed(self, text):
-        slexer = SLexer(**self.options)
-
-        current_code_block = ''
-        insertions = []
-
-        for match in line_re.finditer(text):
-            line = match.group()
-            if line.startswith('>') or line.startswith('+'):
-                # Colorize the prompt as such,
-                # then put rest of line into current_code_block
-                insertions.append((len(current_code_block),
-                                   [(0, Generic.Prompt, line[:2])]))
-                current_code_block += line[2:]
-            else:
-                # We have reached a non-prompt line!
-                # If we have stored prompt lines, need to process them first.
-                if current_code_block:
-                    # Weave together the prompts and highlight code.
-                    yield from do_insertions(
-                        insertions, slexer.get_tokens_unprocessed(current_code_block))
-                    # Reset vars for next code block.
-                    current_code_block = ''
-                    insertions = []
-                # Now process the actual line itself, this is output from R.
-                yield match.start(), Generic.Output, line
-
-        # If we happen to end on a code block with nothing after it, need to
-        # process the last code block. This is neither elegant nor DRY so
-        # should be changed.
-        if current_code_block:
-            yield from do_insertions(
-                insertions, slexer.get_tokens_unprocessed(current_code_block))
-
-
-class SLexer(RegexLexer):
-    """
-    For S, S-plus, and R source code.
-
-    .. versionadded:: 0.10
-    """
-
-    name = 'S'
-    aliases = ['splus', 's', 'r']
-    filenames = ['*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron']
-    mimetypes = ['text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r',
-                 'text/x-R', 'text/x-r-history', 'text/x-r-profile']
-
-    valid_name = r'`[^`\\]*(?:\\.[^`\\]*)*`|(?:[a-zA-Z]|\.[A-Za-z_.])[\w.]*|\.'
-    tokens = {
-        'comments': [
-            (r'#.*$', Comment.Single),
-        ],
-        'valid_name': [
-            (valid_name, Name),
-        ],
-        'punctuation': [
-            (r'\[{1,2}|\]{1,2}|\(|\)|;|,', Punctuation),
-        ],
-        'keywords': [
-            (r'(if|else|for|while|repeat|in|next|break|return|switch|function)'
-             r'(?![\w.])',
-             Keyword.Reserved),
-        ],
-        'operators': [
-            (r'<>?|-|==|<=|>=|<|>|&&?|!=|\|\|?|\?', Operator),
-            (r'\*|\+|\^|/|!|%[^%]*%|=|~|\$|@|:{1,3}', Operator),
-        ],
-        'builtin_symbols': [
-            (r'(NULL|NA(_(integer|real|complex|character)_)?|'
-             r'letters|LETTERS|Inf|TRUE|FALSE|NaN|pi|\.\.(\.|[0-9]+))'
-             r'(?![\w.])',
-             Keyword.Constant),
-            (r'(T|F)\b', Name.Builtin.Pseudo),
-        ],
-        'numbers': [
-            # hex number
-            (r'0[xX][a-fA-F0-9]+([pP][0-9]+)?[Li]?', Number.Hex),
-            # decimal number
-            (r'[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)([eE][+-]?[0-9]+)?[Li]?',
-             Number),
-        ],
-        'statements': [
-            include('comments'),
-            # whitespaces
-            (r'\s+', Whitespace),
-            (r'\'', String, 'string_squote'),
-            (r'\"', String, 'string_dquote'),
-            include('builtin_symbols'),
-            include('valid_name'),
-            include('numbers'),
-            include('keywords'),
-            include('punctuation'),
-            include('operators'),
-        ],
-        'root': [
-            # calls:
-            (r'(%s)\s*(?=\()' % valid_name, Name.Function),
-            include('statements'),
-            # blocks:
-            (r'\{|\}', Punctuation),
-            # (r'\{', Punctuation, 'block'),
-            (r'.', Text),
-        ],
-        # 'block': [
-        #    include('statements'),
-        #    ('\{', Punctuation, '#push'),
-        #    ('\}', Punctuation, '#pop')
-        # ],
-        'string_squote': [
-            (r'([^\'\\]|\\.)*\'', String, '#pop'),
-        ],
-        'string_dquote': [
-            (r'([^"\\]|\\.)*"', String, '#pop'),
-        ],
-    }
-
-    def analyse_text(text):
-        if re.search(r'[a-z0-9_\])\s]<-(?!-)', text):
-            return 0.11
-
-
-class RdLexer(RegexLexer):
-    """
-    Pygments Lexer for R documentation (Rd) files
-
-    This is a very minimal implementation, highlighting little more
-    than the macros. A description of Rd syntax is found in `Writing R
-    Extensions `_
-    and `Parsing Rd files `_.
-
-    .. versionadded:: 1.6
-    """
-    name = 'Rd'
-    aliases = ['rd']
-    filenames = ['*.Rd']
-    mimetypes = ['text/x-r-doc']
-
-    # To account for verbatim / LaTeX-like / and R-like areas
-    # would require parsing.
-    tokens = {
-        'root': [
-            # catch escaped brackets and percent sign
-            (r'\\[\\{}%]', String.Escape),
-            # comments
-            (r'%.*$', Comment),
-            # special macros with no arguments
-            (r'\\(?:cr|l?dots|R|tab)\b', Keyword.Constant),
-            # macros
-            (r'\\[a-zA-Z]+\b', Keyword),
-            # special preprocessor macros
-            (r'^\s*#(?:ifn?def|endif).*\b', Comment.Preproc),
-            # non-escaped brackets
-            (r'[{}]', Name.Builtin),
-            # everything else
-            (r'[^\\%\n{}]+', Text),
-            (r'.', Text),
-        ]
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/rdf.py b/venv/lib/python3.11/site-packages/pygments/lexers/rdf.py
deleted file mode 100644
index c4fb998..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/rdf.py
+++ /dev/null
@@ -1,466 +0,0 @@
-"""
-    pygments.lexers.rdf
-    ~~~~~~~~~~~~~~~~~~~
-
-    Lexers for semantic web and RDF query languages and markup.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, default
-from pygments.token import Keyword, Punctuation, String, Number, Operator, \
-    Generic, Whitespace, Name, Literal, Comment, Text
-
-__all__ = ['SparqlLexer', 'TurtleLexer', 'ShExCLexer']
-
-
-class SparqlLexer(RegexLexer):
-    """
-    Lexer for `SPARQL `_ query language.
-
-    .. versionadded:: 2.0
-    """
-    name = 'SPARQL'
-    aliases = ['sparql']
-    filenames = ['*.rq', '*.sparql']
-    mimetypes = ['application/sparql-query']
-
-    # character group definitions ::
-
-    PN_CHARS_BASE_GRP = ('a-zA-Z'
-                         '\u00c0-\u00d6'
-                         '\u00d8-\u00f6'
-                         '\u00f8-\u02ff'
-                         '\u0370-\u037d'
-                         '\u037f-\u1fff'
-                         '\u200c-\u200d'
-                         '\u2070-\u218f'
-                         '\u2c00-\u2fef'
-                         '\u3001-\ud7ff'
-                         '\uf900-\ufdcf'
-                         '\ufdf0-\ufffd')
-
-    PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')
-
-    PN_CHARS_GRP = (PN_CHARS_U_GRP +
-                    r'\-' +
-                    r'0-9' +
-                    '\u00b7' +
-                    '\u0300-\u036f' +
-                    '\u203f-\u2040')
-
-    HEX_GRP = '0-9A-Fa-f'
-
-    PN_LOCAL_ESC_CHARS_GRP = r' _~.\-!$&"()*+,;=/?#@%'
-
-    # terminal productions ::
-
-    PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
-
-    PN_CHARS_U = '[' + PN_CHARS_U_GRP + ']'
-
-    PN_CHARS = '[' + PN_CHARS_GRP + ']'
-
-    HEX = '[' + HEX_GRP + ']'
-
-    PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
-
-    IRIREF = r'<(?:[^<>"{}|^`\\\x00-\x20])*>'
-
-    BLANK_NODE_LABEL = '_:[0-9' + PN_CHARS_U_GRP + '](?:[' + PN_CHARS_GRP + \
-                       '.]*' + PN_CHARS + ')?'
-
-    PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
-
-    VARNAME = '[0-9' + PN_CHARS_U_GRP + '][' + PN_CHARS_U_GRP + \
-              '0-9\u00b7\u0300-\u036f\u203f-\u2040]*'
-
-    PERCENT = '%' + HEX + HEX
-
-    PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
-
-    PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
-
-    PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
-                '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
-                PN_CHARS_GRP + ':]|' + PLX + '))?')
-
-    EXPONENT = r'[eE][+-]?\d+'
-
-    # Lexer token definitions ::
-
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            # keywords ::
-            (r'(?i)(select|construct|describe|ask|where|filter|group\s+by|minus|'
-             r'distinct|reduced|from\s+named|from|order\s+by|desc|asc|limit|'
-             r'offset|values|bindings|load|into|clear|drop|create|add|move|copy|'
-             r'insert\s+data|delete\s+data|delete\s+where|with|delete|insert|'
-             r'using\s+named|using|graph|default|named|all|optional|service|'
-             r'silent|bind|undef|union|not\s+in|in|as|having|to|prefix|base)\b', Keyword),
-            (r'(a)\b', Keyword),
-            # IRIs ::
-            ('(' + IRIREF + ')', Name.Label),
-            # blank nodes ::
-            ('(' + BLANK_NODE_LABEL + ')', Name.Label),
-            #  # variables ::
-            ('[?$]' + VARNAME, Name.Variable),
-            # prefixed names ::
-            (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + r')?',
-             bygroups(Name.Namespace, Punctuation, Name.Tag)),
-            # function names ::
-            (r'(?i)(str|lang|langmatches|datatype|bound|iri|uri|bnode|rand|abs|'
-             r'ceil|floor|round|concat|strlen|ucase|lcase|encode_for_uri|'
-             r'contains|strstarts|strends|strbefore|strafter|year|month|day|'
-             r'hours|minutes|seconds|timezone|tz|now|uuid|struuid|md5|sha1|sha256|sha384|'
-             r'sha512|coalesce|if|strlang|strdt|sameterm|isiri|isuri|isblank|'
-             r'isliteral|isnumeric|regex|substr|replace|exists|not\s+exists|'
-             r'count|sum|min|max|avg|sample|group_concat|separator)\b',
-             Name.Function),
-            # boolean literals ::
-            (r'(true|false)', Keyword.Constant),
-            # double literals ::
-            (r'[+\-]?(\d+\.\d*' + EXPONENT + r'|\.?\d+' + EXPONENT + ')', Number.Float),
-            # decimal literals ::
-            (r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float),
-            # integer literals ::
-            (r'[+\-]?\d+', Number.Integer),
-            # operators ::
-            (r'(\|\||&&|=|\*|\-|\+|/|!=|<=|>=|!|<|>)', Operator),
-            # punctuation characters ::
-            (r'[(){}.;,:^\[\]]', Punctuation),
-            # line comments ::
-            (r'#[^\n]*', Comment),
-            # strings ::
-            (r'"""', String, 'triple-double-quoted-string'),
-            (r'"', String, 'single-double-quoted-string'),
-            (r"'''", String, 'triple-single-quoted-string'),
-            (r"'", String, 'single-single-quoted-string'),
-        ],
-        'triple-double-quoted-string': [
-            (r'"""', String, 'end-of-string'),
-            (r'[^\\]+', String),
-            (r'\\', String, 'string-escape'),
-        ],
-        'single-double-quoted-string': [
-            (r'"', String, 'end-of-string'),
-            (r'[^"\\\n]+', String),
-            (r'\\', String, 'string-escape'),
-        ],
-        'triple-single-quoted-string': [
-            (r"'''", String, 'end-of-string'),
-            (r'[^\\]+', String),
-            (r'\\', String.Escape, 'string-escape'),
-        ],
-        'single-single-quoted-string': [
-            (r"'", String, 'end-of-string'),
-            (r"[^'\\\n]+", String),
-            (r'\\', String, 'string-escape'),
-        ],
-        'string-escape': [
-            (r'u' + HEX + '{4}', String.Escape, '#pop'),
-            (r'U' + HEX + '{8}', String.Escape, '#pop'),
-            (r'.', String.Escape, '#pop'),
-        ],
-        'end-of-string': [
-            (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
-             bygroups(Operator, Name.Function), '#pop:2'),
-            (r'\^\^', Operator, '#pop:2'),
-            default('#pop:2'),
-        ],
-    }
-
-
-class TurtleLexer(RegexLexer):
-    """
-    Lexer for `Turtle `_ data language.
-
-    .. versionadded:: 2.1
-    """
-    name = 'Turtle'
-    aliases = ['turtle']
-    filenames = ['*.ttl']
-    mimetypes = ['text/turtle', 'application/x-turtle']
-
-    # character group definitions ::
-    PN_CHARS_BASE_GRP = ('a-zA-Z'
-                         '\u00c0-\u00d6'
-                         '\u00d8-\u00f6'
-                         '\u00f8-\u02ff'
-                         '\u0370-\u037d'
-                         '\u037f-\u1fff'
-                         '\u200c-\u200d'
-                         '\u2070-\u218f'
-                         '\u2c00-\u2fef'
-                         '\u3001-\ud7ff'
-                         '\uf900-\ufdcf'
-                         '\ufdf0-\ufffd')
-
-    PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')
-
-    PN_CHARS_GRP = (PN_CHARS_U_GRP +
-                    r'\-' +
-                    r'0-9' +
-                    '\u00b7' +
-                    '\u0300-\u036f' +
-                    '\u203f-\u2040')
-
-    PN_CHARS = '[' + PN_CHARS_GRP + ']'
-
-    PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
-
-    PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
-
-    HEX_GRP = '0-9A-Fa-f'
-
-    HEX = '[' + HEX_GRP + ']'
-
-    PERCENT = '%' + HEX + HEX
-
-    PN_LOCAL_ESC_CHARS_GRP = r' _~.\-!$&"()*+,;=/?#@%'
-
-    PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
-
-    PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
-
-    PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
-
-    PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
-                '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
-                PN_CHARS_GRP + ':]|' + PLX + '))?')
-
-    patterns = {
-        'PNAME_NS': r'((?:[a-zA-Z][\w-]*)?\:)',  # Simplified character range
-        'IRIREF': r'(<[^<>"{}|^`\\\x00-\x20]*>)'
-    }
-
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-
-            # Base / prefix
-            (r'(@base|BASE)(\s+)%(IRIREF)s(\s*)(\.?)' % patterns,
-             bygroups(Keyword, Whitespace, Name.Variable, Whitespace,
-                      Punctuation)),
-            (r'(@prefix|PREFIX)(\s+)%(PNAME_NS)s(\s+)%(IRIREF)s(\s*)(\.?)' % patterns,
-             bygroups(Keyword, Whitespace, Name.Namespace, Whitespace,
-                      Name.Variable, Whitespace, Punctuation)),
-
-            # The shorthand predicate 'a'
-            (r'(?<=\s)a(?=\s)', Keyword.Type),
-
-            # IRIREF
-            (r'%(IRIREF)s' % patterns, Name.Variable),
-
-            # PrefixedName
-            (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + r')?',
-             bygroups(Name.Namespace, Punctuation, Name.Tag)),
-
-            # BlankNodeLabel
-            (r'(_)(:)([' + PN_CHARS_U_GRP + r'0-9]([' + PN_CHARS_GRP + r'.]*' + PN_CHARS + ')?)',
-             bygroups(Name.Namespace, Punctuation, Name.Tag)),
-
-            # Comment
-            (r'#[^\n]+', Comment),
-
-            (r'\b(true|false)\b', Literal),
-            (r'[+\-]?\d*\.\d+', Number.Float),
-            (r'[+\-]?\d*(:?\.\d+)?E[+\-]?\d+', Number.Float),
-            (r'[+\-]?\d+', Number.Integer),
-            (r'[\[\](){}.;,:^]', Punctuation),
-
-            (r'"""', String, 'triple-double-quoted-string'),
-            (r'"', String, 'single-double-quoted-string'),
-            (r"'''", String, 'triple-single-quoted-string'),
-            (r"'", String, 'single-single-quoted-string'),
-        ],
-        'triple-double-quoted-string': [
-            (r'"""', String, 'end-of-string'),
-            (r'[^\\]+', String),
-            (r'\\', String, 'string-escape'),
-        ],
-        'single-double-quoted-string': [
-            (r'"', String, 'end-of-string'),
-            (r'[^"\\\n]+', String),
-            (r'\\', String, 'string-escape'),
-        ],
-        'triple-single-quoted-string': [
-            (r"'''", String, 'end-of-string'),
-            (r'[^\\]+', String),
-            (r'\\', String, 'string-escape'),
-        ],
-        'single-single-quoted-string': [
-            (r"'", String, 'end-of-string'),
-            (r"[^'\\\n]+", String),
-            (r'\\', String, 'string-escape'),
-        ],
-        'string-escape': [
-            (r'.', String, '#pop'),
-        ],
-        'end-of-string': [
-            (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
-             bygroups(Operator, Generic.Emph), '#pop:2'),
-
-            (r'(\^\^)%(IRIREF)s' % patterns, bygroups(Operator, Generic.Emph), '#pop:2'),
-
-            default('#pop:2'),
-
-        ],
-    }
-
-    # Turtle and Tera Term macro files share the same file extension
-    # but each has a recognizable and distinct syntax.
-    def analyse_text(text):
-        for t in ('@base ', 'BASE ', '@prefix ', 'PREFIX '):
-            if re.search(r'^\s*%s' % t, text):
-                return 0.80
-
-
-class ShExCLexer(RegexLexer):
-    """
-    Lexer for `ShExC `_ shape expressions language syntax.
-    """
-    name = 'ShExC'
-    aliases = ['shexc', 'shex']
-    filenames = ['*.shex']
-    mimetypes = ['text/shex']
-
-    # character group definitions ::
-
-    PN_CHARS_BASE_GRP = ('a-zA-Z'
-                         '\u00c0-\u00d6'
-                         '\u00d8-\u00f6'
-                         '\u00f8-\u02ff'
-                         '\u0370-\u037d'
-                         '\u037f-\u1fff'
-                         '\u200c-\u200d'
-                         '\u2070-\u218f'
-                         '\u2c00-\u2fef'
-                         '\u3001-\ud7ff'
-                         '\uf900-\ufdcf'
-                         '\ufdf0-\ufffd')
-
-    PN_CHARS_U_GRP = (PN_CHARS_BASE_GRP + '_')
-
-    PN_CHARS_GRP = (PN_CHARS_U_GRP +
-                    r'\-' +
-                    r'0-9' +
-                    '\u00b7' +
-                    '\u0300-\u036f' +
-                    '\u203f-\u2040')
-
-    HEX_GRP = '0-9A-Fa-f'
-
-    PN_LOCAL_ESC_CHARS_GRP = r"_~.\-!$&'()*+,;=/?#@%"
-
-    # terminal productions ::
-
-    PN_CHARS_BASE = '[' + PN_CHARS_BASE_GRP + ']'
-
-    PN_CHARS_U = '[' + PN_CHARS_U_GRP + ']'
-
-    PN_CHARS = '[' + PN_CHARS_GRP + ']'
-
-    HEX = '[' + HEX_GRP + ']'
-
-    PN_LOCAL_ESC_CHARS = '[' + PN_LOCAL_ESC_CHARS_GRP + ']'
-
-    UCHAR_NO_BACKSLASH = '(?:u' + HEX + '{4}|U' + HEX + '{8})'
-
-    UCHAR = r'\\' + UCHAR_NO_BACKSLASH
-
-    IRIREF = r'<(?:[^\x00-\x20<>"{}|^`\\]|' + UCHAR + ')*>'
-
-    BLANK_NODE_LABEL = '_:[0-9' + PN_CHARS_U_GRP + '](?:[' + PN_CHARS_GRP + \
-                       '.]*' + PN_CHARS + ')?'
-
-    PN_PREFIX = PN_CHARS_BASE + '(?:[' + PN_CHARS_GRP + '.]*' + PN_CHARS + ')?'
-
-    PERCENT = '%' + HEX + HEX
-
-    PN_LOCAL_ESC = r'\\' + PN_LOCAL_ESC_CHARS
-
-    PLX = '(?:' + PERCENT + ')|(?:' + PN_LOCAL_ESC + ')'
-
-    PN_LOCAL = ('(?:[' + PN_CHARS_U_GRP + ':0-9' + ']|' + PLX + ')' +
-                '(?:(?:[' + PN_CHARS_GRP + '.:]|' + PLX + ')*(?:[' +
-                PN_CHARS_GRP + ':]|' + PLX + '))?')
-
-    EXPONENT = r'[eE][+-]?\d+'
-
-    # Lexer token definitions ::
-
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            # keywords ::
-            (r'(?i)(base|prefix|start|external|'
-             r'literal|iri|bnode|nonliteral|length|minlength|maxlength|'
-             r'mininclusive|minexclusive|maxinclusive|maxexclusive|'
-             r'totaldigits|fractiondigits|'
-             r'closed|extra)\b', Keyword),
-            (r'(a)\b', Keyword),
-            # IRIs ::
-            ('(' + IRIREF + ')', Name.Label),
-            # blank nodes ::
-            ('(' + BLANK_NODE_LABEL + ')', Name.Label),
-            # prefixed names ::
-            (r'(' + PN_PREFIX + r')?(\:)(' + PN_LOCAL + ')?',
-             bygroups(Name.Namespace, Punctuation, Name.Tag)),
-            # boolean literals ::
-            (r'(true|false)', Keyword.Constant),
-            # double literals ::
-            (r'[+\-]?(\d+\.\d*' + EXPONENT + r'|\.?\d+' + EXPONENT + ')', Number.Float),
-            # decimal literals ::
-            (r'[+\-]?(\d+\.\d*|\.\d+)', Number.Float),
-            # integer literals ::
-            (r'[+\-]?\d+', Number.Integer),
-            # operators ::
-            (r'[@|$&=*+?^\-~]', Operator),
-            # operator keywords ::
-            (r'(?i)(and|or|not)\b', Operator.Word),
-            # punctuation characters ::
-            (r'[(){}.;,:^\[\]]', Punctuation),
-            # line comments ::
-            (r'#[^\n]*', Comment),
-            # strings ::
-            (r'"""', String, 'triple-double-quoted-string'),
-            (r'"', String, 'single-double-quoted-string'),
-            (r"'''", String, 'triple-single-quoted-string'),
-            (r"'", String, 'single-single-quoted-string'),
-        ],
-        'triple-double-quoted-string': [
-            (r'"""', String, 'end-of-string'),
-            (r'[^\\]+', String),
-            (r'\\', String, 'string-escape'),
-        ],
-        'single-double-quoted-string': [
-            (r'"', String, 'end-of-string'),
-            (r'[^"\\\n]+', String),
-            (r'\\', String, 'string-escape'),
-        ],
-        'triple-single-quoted-string': [
-            (r"'''", String, 'end-of-string'),
-            (r'[^\\]+', String),
-            (r'\\', String.Escape, 'string-escape'),
-        ],
-        'single-single-quoted-string': [
-            (r"'", String, 'end-of-string'),
-            (r"[^'\\\n]+", String),
-            (r'\\', String, 'string-escape'),
-        ],
-        'string-escape': [
-            (UCHAR_NO_BACKSLASH, String.Escape, '#pop'),
-            (r'.', String.Escape, '#pop'),
-        ],
-        'end-of-string': [
-            (r'(@)([a-zA-Z]+(?:-[a-zA-Z0-9]+)*)',
-             bygroups(Operator, Name.Function), '#pop:2'),
-            (r'\^\^', Operator, '#pop:2'),
-            default('#pop:2'),
-        ],
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/rebol.py b/venv/lib/python3.11/site-packages/pygments/lexers/rebol.py
deleted file mode 100644
index 6170f0c..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/rebol.py
+++ /dev/null
@@ -1,419 +0,0 @@
-"""
-    pygments.lexers.rebol
-    ~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for the REBOL and related languages.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Generic, Whitespace
-
-__all__ = ['RebolLexer', 'RedLexer']
-
-
-class RebolLexer(RegexLexer):
-    """
-    A `REBOL `_ lexer.
-
-    .. versionadded:: 1.1
-    """
-    name = 'REBOL'
-    aliases = ['rebol']
-    filenames = ['*.r', '*.r3', '*.reb']
-    mimetypes = ['text/x-rebol']
-
-    flags = re.IGNORECASE | re.MULTILINE
-
-    escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)'
-
-    def word_callback(lexer, match):
-        word = match.group()
-
-        if re.match(".*:$", word):
-            yield match.start(), Generic.Subheading, word
-        elif re.match(
-            r'(native|alias|all|any|as-string|as-binary|bind|bound\?|case|'
-            r'catch|checksum|comment|debase|dehex|exclude|difference|disarm|'
-            r'either|else|enbase|foreach|remove-each|form|free|get|get-env|if|'
-            r'in|intersect|loop|minimum-of|maximum-of|mold|new-line|'
-            r'new-line\?|not|now|prin|print|reduce|compose|construct|repeat|'
-            r'reverse|save|script\?|set|shift|switch|throw|to-hex|trace|try|'
-            r'type\?|union|unique|unless|unprotect|unset|until|use|value\?|'
-            r'while|compress|decompress|secure|open|close|read|read-io|'
-            r'write-io|write|update|query|wait|input\?|exp|log-10|log-2|'
-            r'log-e|square-root|cosine|sine|tangent|arccosine|arcsine|'
-            r'arctangent|protect|lowercase|uppercase|entab|detab|connected\?|'
-            r'browse|launch|stats|get-modes|set-modes|to-local-file|'
-            r'to-rebol-file|encloak|decloak|create-link|do-browser|bind\?|'
-            r'hide|draw|show|size-text|textinfo|offset-to-caret|'
-            r'caret-to-offset|local-request-file|rgb-to-hsv|hsv-to-rgb|'
-            r'crypt-strength\?|dh-make-key|dh-generate-key|dh-compute-key|'
-            r'dsa-make-key|dsa-generate-key|dsa-make-signature|'
-            r'dsa-verify-signature|rsa-make-key|rsa-generate-key|'
-            r'rsa-encrypt)$', word):
-            yield match.start(), Name.Builtin, word
-        elif re.match(
-            r'(add|subtract|multiply|divide|remainder|power|and~|or~|xor~|'
-            r'minimum|maximum|negate|complement|absolute|random|head|tail|'
-            r'next|back|skip|at|pick|first|second|third|fourth|fifth|sixth|'
-            r'seventh|eighth|ninth|tenth|last|path|find|select|make|to|copy\*|'
-            r'insert|remove|change|poke|clear|trim|sort|min|max|abs|cp|'
-            r'copy)$', word):
-            yield match.start(), Name.Function, word
-        elif re.match(
-            r'(error|source|input|license|help|install|echo|Usage|with|func|'
-            r'throw-on-error|function|does|has|context|probe|\?\?|as-pair|'
-            r'mod|modulo|round|repend|about|set-net|append|join|rejoin|reform|'
-            r'remold|charset|array|replace|move|extract|forskip|forall|alter|'
-            r'first+|also|take|for|forever|dispatch|attempt|what-dir|'
-            r'change-dir|clean-path|list-dir|dirize|rename|split-path|delete|'
-            r'make-dir|delete-dir|in-dir|confirm|dump-obj|upgrade|what|'
-            r'build-tag|process-source|build-markup|decode-cgi|read-cgi|'
-            r'write-user|save-user|set-user-name|protect-system|parse-xml|'
-            r'cvs-date|cvs-version|do-boot|get-net-info|desktop|layout|'
-            r'scroll-para|get-face|alert|set-face|uninstall|unfocus|'
-            r'request-dir|center-face|do-events|net-error|decode-url|'
-            r'parse-header|parse-header-date|parse-email-addrs|import-email|'
-            r'send|build-attach-body|resend|show-popup|hide-popup|open-events|'
-            r'find-key-face|do-face|viewtop|confine|find-window|'
-            r'insert-event-func|remove-event-func|inform|dump-pane|dump-face|'
-            r'flag-face|deflag-face|clear-fields|read-net|vbug|path-thru|'
-            r'read-thru|load-thru|do-thru|launch-thru|load-image|'
-            r'request-download|do-face-alt|set-font|set-para|get-style|'
-            r'set-style|make-face|stylize|choose|hilight-text|hilight-all|'
-            r'unlight-text|focus|scroll-drag|clear-face|reset-face|scroll-face|'
-            r'resize-face|load-stock|load-stock-block|notify|request|flash|'
-            r'request-color|request-pass|request-text|request-list|'
-            r'request-date|request-file|dbug|editor|link-relative-path|'
-            r'emailer|parse-error)$', word):
-            yield match.start(), Keyword.Namespace, word
-        elif re.match(
-            r'(halt|quit|do|load|q|recycle|call|run|ask|parse|view|unview|'
-            r'return|exit|break)$', word):
-            yield match.start(), Name.Exception, word
-        elif re.match('REBOL$', word):
-            yield match.start(), Generic.Heading, word
-        elif re.match("to-.*", word):
-            yield match.start(), Keyword, word
-        elif re.match(r'(\+|-|\*|/|//|\*\*|and|or|xor|=\?|=|==|<>|<|>|<=|>=)$',
-                      word):
-            yield match.start(), Operator, word
-        elif re.match(r".*\?$", word):
-            yield match.start(), Keyword, word
-        elif re.match(r".*\!$", word):
-            yield match.start(), Keyword.Type, word
-        elif re.match("'.*", word):
-            yield match.start(), Name.Variable.Instance, word  # lit-word
-        elif re.match("#.*", word):
-            yield match.start(), Name.Label, word  # issue
-        elif re.match("%.*", word):
-            yield match.start(), Name.Decorator, word  # file
-        else:
-            yield match.start(), Name.Variable, word
-
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            (r'#"', String.Char, 'char'),
-            (r'#\{[0-9a-f]*\}', Number.Hex),
-            (r'2#\{', Number.Hex, 'bin2'),
-            (r'64#\{[0-9a-z+/=\s]*\}', Number.Hex),
-            (r'"', String, 'string'),
-            (r'\{', String, 'string2'),
-            (r';#+.*\n', Comment.Special),
-            (r';\*+.*\n', Comment.Preproc),
-            (r';.*\n', Comment),
-            (r'%"', Name.Decorator, 'stringFile'),
-            (r'%[^(^{")\s\[\]]+', Name.Decorator),
-            (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float),  # money
-            (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other),    # time
-            (r'\d+[\-/][0-9a-z]+[\-/]\d+(\/\d+\:\d+((\:\d+)?'
-             r'([.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other),   # date
-            (r'\d+(\.\d+)+\.\d+', Keyword.Constant),             # tuple
-            (r'\d+X\d+', Keyword.Constant),                   # pair
-            (r'[+-]?\d+(\'\d+)?([.,]\d*)?E[+-]?\d+', Number.Float),
-            (r'[+-]?\d+(\'\d+)?[.,]\d*', Number.Float),
-            (r'[+-]?\d+(\'\d+)?', Number),
-            (r'[\[\]()]', Generic.Strong),
-            (r'[a-z]+[^(^{"\s:)]*://[^(^{"\s)]*', Name.Decorator),  # url
-            (r'mailto:[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator),  # url
-            (r'[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator),         # email
-            (r'comment\s"', Comment, 'commentString1'),
-            (r'comment\s\{', Comment, 'commentString2'),
-            (r'comment\s\[', Comment, 'commentBlock'),
-            (r'comment\s[^(\s{"\[]+', Comment),
-            (r'/[^(^{")\s/[\]]*', Name.Attribute),
-            (r'([^(^{")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
-            (r'<[\w:.-]*>', Name.Tag),
-            (r'<[^(<>\s")]+', Name.Tag, 'tag'),
-            (r'([^(^{")\s]+)', Text),
-        ],
-        'string': [
-            (r'[^(^")]+', String),
-            (escape_re, String.Escape),
-            (r'[(|)]+', String),
-            (r'\^.', String.Escape),
-            (r'"', String, '#pop'),
-        ],
-        'string2': [
-            (r'[^(^{})]+', String),
-            (escape_re, String.Escape),
-            (r'[(|)]+', String),
-            (r'\^.', String.Escape),
-            (r'\{', String, '#push'),
-            (r'\}', String, '#pop'),
-        ],
-        'stringFile': [
-            (r'[^(^")]+', Name.Decorator),
-            (escape_re, Name.Decorator),
-            (r'\^.', Name.Decorator),
-            (r'"', Name.Decorator, '#pop'),
-        ],
-        'char': [
-            (escape_re + '"', String.Char, '#pop'),
-            (r'\^."', String.Char, '#pop'),
-            (r'."', String.Char, '#pop'),
-        ],
-        'tag': [
-            (escape_re, Name.Tag),
-            (r'"', Name.Tag, 'tagString'),
-            (r'[^(<>\r\n")]+', Name.Tag),
-            (r'>', Name.Tag, '#pop'),
-        ],
-        'tagString': [
-            (r'[^(^")]+', Name.Tag),
-            (escape_re, Name.Tag),
-            (r'[(|)]+', Name.Tag),
-            (r'\^.', Name.Tag),
-            (r'"', Name.Tag, '#pop'),
-        ],
-        'tuple': [
-            (r'(\d+\.)+', Keyword.Constant),
-            (r'\d+', Keyword.Constant, '#pop'),
-        ],
-        'bin2': [
-            (r'\s+', Number.Hex),
-            (r'([01]\s*){8}', Number.Hex),
-            (r'\}', Number.Hex, '#pop'),
-        ],
-        'commentString1': [
-            (r'[^(^")]+', Comment),
-            (escape_re, Comment),
-            (r'[(|)]+', Comment),
-            (r'\^.', Comment),
-            (r'"', Comment, '#pop'),
-        ],
-        'commentString2': [
-            (r'[^(^{})]+', Comment),
-            (escape_re, Comment),
-            (r'[(|)]+', Comment),
-            (r'\^.', Comment),
-            (r'\{', Comment, '#push'),
-            (r'\}', Comment, '#pop'),
-        ],
-        'commentBlock': [
-            (r'\[', Comment, '#push'),
-            (r'\]', Comment, '#pop'),
-            (r'"', Comment, "commentString1"),
-            (r'\{', Comment, "commentString2"),
-            (r'[^(\[\]"{)]+', Comment),
-        ],
-    }
-
-    def analyse_text(text):
-        """
-        Check if code contains REBOL header and so it probably not R code
-        """
-        if re.match(r'^\s*REBOL\s*\[', text, re.IGNORECASE):
-            # The code starts with REBOL header
-            return 1.0
-        elif re.search(r'\s*REBOL\s*\[', text, re.IGNORECASE):
-            # The code contains REBOL header but also some text before it
-            return 0.5
-
-
-class RedLexer(RegexLexer):
-    """
-    A `Red-language `_ lexer.
-
-    .. versionadded:: 2.0
-    """
-    name = 'Red'
-    aliases = ['red', 'red/system']
-    filenames = ['*.red', '*.reds']
-    mimetypes = ['text/x-red', 'text/x-red-system']
-
-    flags = re.IGNORECASE | re.MULTILINE
-
-    escape_re = r'(?:\^\([0-9a-f]{1,4}\)*)'
-
-    def word_callback(lexer, match):
-        word = match.group()
-
-        if re.match(".*:$", word):
-            yield match.start(), Generic.Subheading, word
-        elif re.match(r'(if|unless|either|any|all|while|until|loop|repeat|'
-                      r'foreach|forall|func|function|does|has|switch|'
-                      r'case|reduce|compose|get|set|print|prin|equal\?|'
-                      r'not-equal\?|strict-equal\?|lesser\?|greater\?|lesser-or-equal\?|'
-                      r'greater-or-equal\?|same\?|not|type\?|stats|'
-                      r'bind|union|replace|charset|routine)$', word):
-            yield match.start(), Name.Builtin, word
-        elif re.match(r'(make|random|reflect|to|form|mold|absolute|add|divide|multiply|negate|'
-                      r'power|remainder|round|subtract|even\?|odd\?|and~|complement|or~|xor~|'
-                      r'append|at|back|change|clear|copy|find|head|head\?|index\?|insert|'
-                      r'length\?|next|pick|poke|remove|reverse|select|sort|skip|swap|tail|tail\?|'
-                      r'take|trim|create|close|delete|modify|open|open\?|query|read|rename|'
-                      r'update|write)$', word):
-            yield match.start(), Name.Function, word
-        elif re.match(r'(yes|on|no|off|true|false|tab|cr|lf|newline|escape|slash|sp|space|null|'
-                      r'none|crlf|dot|null-byte)$', word):
-            yield match.start(), Name.Builtin.Pseudo, word
-        elif re.match(r'(#system-global|#include|#enum|#define|#either|#if|#import|#export|'
-                      r'#switch|#default|#get-definition)$', word):
-            yield match.start(), Keyword.Namespace, word
-        elif re.match(r'(system|halt|quit|quit-return|do|load|q|recycle|call|run|ask|parse|'
-                      r'raise-error|return|exit|break|alias|push|pop|probe|\?\?|spec-of|body-of|'
-                      r'quote|forever)$', word):
-            yield match.start(), Name.Exception, word
-        elif re.match(r'(action\?|block\?|char\?|datatype\?|file\?|function\?|get-path\?|zero\?|'
-                      r'get-word\?|integer\?|issue\?|lit-path\?|lit-word\?|logic\?|native\?|'
-                      r'op\?|paren\?|path\?|refinement\?|set-path\?|set-word\?|string\?|unset\?|'
-                      r'any-struct\?|none\?|word\?|any-series\?)$', word):
-            yield match.start(), Keyword, word
-        elif re.match(r'(JNICALL|stdcall|cdecl|infix)$', word):
-            yield match.start(), Keyword.Namespace, word
-        elif re.match("to-.*", word):
-            yield match.start(), Keyword, word
-        elif re.match(r'(\+|-\*\*|-|\*\*|//|/|\*|and|or|xor|=\?|===|==|=|<>|<=|>=|'
-                      r'<<<|>>>|<<|>>|<|>%)$', word):
-            yield match.start(), Operator, word
-        elif re.match(r".*\!$", word):
-            yield match.start(), Keyword.Type, word
-        elif re.match("'.*", word):
-            yield match.start(), Name.Variable.Instance, word  # lit-word
-        elif re.match("#.*", word):
-            yield match.start(), Name.Label, word  # issue
-        elif re.match("%.*", word):
-            yield match.start(), Name.Decorator, word  # file
-        elif re.match(":.*", word):
-            yield match.start(), Generic.Subheading, word  # get-word
-        else:
-            yield match.start(), Name.Variable, word
-
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            (r'#"', String.Char, 'char'),
-            (r'#\{[0-9a-f\s]*\}', Number.Hex),
-            (r'2#\{', Number.Hex, 'bin2'),
-            (r'64#\{[0-9a-z+/=\s]*\}', Number.Hex),
-            (r'([0-9a-f]+)(h)((\s)|(?=[\[\]{}"()]))',
-             bygroups(Number.Hex, Name.Variable, Whitespace)),
-            (r'"', String, 'string'),
-            (r'\{', String, 'string2'),
-            (r';#+.*\n', Comment.Special),
-            (r';\*+.*\n', Comment.Preproc),
-            (r';.*\n', Comment),
-            (r'%"', Name.Decorator, 'stringFile'),
-            (r'%[^(^{")\s\[\]]+', Name.Decorator),
-            (r'[+-]?([a-z]{1,3})?\$\d+(\.\d+)?', Number.Float),  # money
-            (r'[+-]?\d+\:\d+(\:\d+)?(\.\d+)?', String.Other),    # time
-            (r'\d+[\-/][0-9a-z]+[\-/]\d+(/\d+:\d+((:\d+)?'
-             r'([\.\d+]?([+-]?\d+:\d+)?)?)?)?', String.Other),   # date
-            (r'\d+(\.\d+)+\.\d+', Keyword.Constant),             # tuple
-            (r'\d+X\d+', Keyword.Constant),                   # pair
-            (r'[+-]?\d+(\'\d+)?([.,]\d*)?E[+-]?\d+', Number.Float),
-            (r'[+-]?\d+(\'\d+)?[.,]\d*', Number.Float),
-            (r'[+-]?\d+(\'\d+)?', Number),
-            (r'[\[\]()]', Generic.Strong),
-            (r'[a-z]+[^(^{"\s:)]*://[^(^{"\s)]*', Name.Decorator),  # url
-            (r'mailto:[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator),  # url
-            (r'[^(^{"@\s)]+@[^(^{"@\s)]+', Name.Decorator),         # email
-            (r'comment\s"', Comment, 'commentString1'),
-            (r'comment\s\{', Comment, 'commentString2'),
-            (r'comment\s\[', Comment, 'commentBlock'),
-            (r'comment\s[^(\s{"\[]+', Comment),
-            (r'/[^(^{^")\s/[\]]*', Name.Attribute),
-            (r'([^(^{^")\s/[\]]+)(?=[:({"\s/\[\]])', word_callback),
-            (r'<[\w:.-]*>', Name.Tag),
-            (r'<[^(<>\s")]+', Name.Tag, 'tag'),
-            (r'([^(^{")\s]+)', Text),
-        ],
-        'string': [
-            (r'[^(^")]+', String),
-            (escape_re, String.Escape),
-            (r'[(|)]+', String),
-            (r'\^.', String.Escape),
-            (r'"', String, '#pop'),
-        ],
-        'string2': [
-            (r'[^(^{})]+', String),
-            (escape_re, String.Escape),
-            (r'[(|)]+', String),
-            (r'\^.', String.Escape),
-            (r'\{', String, '#push'),
-            (r'\}', String, '#pop'),
-        ],
-        'stringFile': [
-            (r'[^(^")]+', Name.Decorator),
-            (escape_re, Name.Decorator),
-            (r'\^.', Name.Decorator),
-            (r'"', Name.Decorator, '#pop'),
-        ],
-        'char': [
-            (escape_re + '"', String.Char, '#pop'),
-            (r'\^."', String.Char, '#pop'),
-            (r'."', String.Char, '#pop'),
-        ],
-        'tag': [
-            (escape_re, Name.Tag),
-            (r'"', Name.Tag, 'tagString'),
-            (r'[^(<>\r\n")]+', Name.Tag),
-            (r'>', Name.Tag, '#pop'),
-        ],
-        'tagString': [
-            (r'[^(^")]+', Name.Tag),
-            (escape_re, Name.Tag),
-            (r'[(|)]+', Name.Tag),
-            (r'\^.', Name.Tag),
-            (r'"', Name.Tag, '#pop'),
-        ],
-        'tuple': [
-            (r'(\d+\.)+', Keyword.Constant),
-            (r'\d+', Keyword.Constant, '#pop'),
-        ],
-        'bin2': [
-            (r'\s+', Number.Hex),
-            (r'([01]\s*){8}', Number.Hex),
-            (r'\}', Number.Hex, '#pop'),
-        ],
-        'commentString1': [
-            (r'[^(^")]+', Comment),
-            (escape_re, Comment),
-            (r'[(|)]+', Comment),
-            (r'\^.', Comment),
-            (r'"', Comment, '#pop'),
-        ],
-        'commentString2': [
-            (r'[^(^{})]+', Comment),
-            (escape_re, Comment),
-            (r'[(|)]+', Comment),
-            (r'\^.', Comment),
-            (r'\{', Comment, '#push'),
-            (r'\}', Comment, '#pop'),
-        ],
-        'commentBlock': [
-            (r'\[', Comment, '#push'),
-            (r'\]', Comment, '#pop'),
-            (r'"', Comment, "commentString1"),
-            (r'\{', Comment, "commentString2"),
-            (r'[^(\[\]"{)]+', Comment),
-        ],
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/resource.py b/venv/lib/python3.11/site-packages/pygments/lexers/resource.py
deleted file mode 100644
index 2583ba8..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/resource.py
+++ /dev/null
@@ -1,84 +0,0 @@
-"""
-    pygments.lexers.resource
-    ~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexer for resource definition files.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Comment, String, Number, Operator, Text, \
-    Keyword, Name
-
-__all__ = ['ResourceLexer']
-
-
-class ResourceLexer(RegexLexer):
-    """Lexer for `ICU Resource bundles
-    `_.
-
-    .. versionadded:: 2.0
-    """
-    name = 'ResourceBundle'
-    aliases = ['resourcebundle', 'resource']
-    filenames = []
-
-    _types = (':table', ':array', ':string', ':bin', ':import', ':intvector',
-              ':int', ':alias')
-
-    flags = re.MULTILINE | re.IGNORECASE
-    tokens = {
-        'root': [
-            (r'//.*?$', Comment),
-            (r'"', String, 'string'),
-            (r'-?\d+', Number.Integer),
-            (r'[,{}]', Operator),
-            (r'([^\s{:]+)(\s*)(%s?)' % '|'.join(_types),
-             bygroups(Name, Text, Keyword)),
-            (r'\s+', Text),
-            (words(_types), Keyword),
-        ],
-        'string': [
-            (r'(\\x[0-9a-f]{2}|\\u[0-9a-f]{4}|\\U00[0-9a-f]{6}|'
-             r'\\[0-7]{1,3}|\\c.|\\[abtnvfre\'"?\\]|\\\{|[^"{\\])+', String),
-            (r'\{', String.Escape, 'msgname'),
-            (r'"', String, '#pop')
-        ],
-        'msgname': [
-            (r'([^{},]+)(\s*)', bygroups(Name, String.Escape), ('#pop', 'message'))
-        ],
-        'message': [
-            (r'\{', String.Escape, 'msgname'),
-            (r'\}', String.Escape, '#pop'),
-            (r'(,)(\s*)([a-z]+)(\s*\})',
-             bygroups(Operator, String.Escape, Keyword, String.Escape), '#pop'),
-            (r'(,)(\s*)([a-z]+)(\s*)(,)(\s*)(offset)(\s*)(:)(\s*)(-?\d+)(\s*)',
-             bygroups(Operator, String.Escape, Keyword, String.Escape, Operator,
-                      String.Escape, Operator.Word, String.Escape, Operator,
-                      String.Escape, Number.Integer, String.Escape), 'choice'),
-            (r'(,)(\s*)([a-z]+)(\s*)(,)(\s*)',
-             bygroups(Operator, String.Escape, Keyword, String.Escape, Operator,
-                      String.Escape), 'choice'),
-            (r'\s+', String.Escape)
-        ],
-        'choice': [
-            (r'(=|<|>|<=|>=|!=)(-?\d+)(\s*\{)',
-             bygroups(Operator, Number.Integer, String.Escape), 'message'),
-            (r'([a-z]+)(\s*\{)', bygroups(Keyword.Type, String.Escape), 'str'),
-            (r'\}', String.Escape, ('#pop', '#pop')),
-            (r'\s+', String.Escape)
-        ],
-        'str': [
-            (r'\}', String.Escape, '#pop'),
-            (r'\{', String.Escape, 'msgname'),
-            (r'[^{}]+', String)
-        ]
-    }
-
-    def analyse_text(text):
-        if text.startswith('root:table'):
-            return 1.0
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/ride.py b/venv/lib/python3.11/site-packages/pygments/lexers/ride.py
deleted file mode 100644
index 077fcc7..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/ride.py
+++ /dev/null
@@ -1,139 +0,0 @@
-"""
-    pygments.lexers.ride
-    ~~~~~~~~~~~~~~~~~~~~
-
-    Lexer for the Ride programming language.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, include
-from pygments.token import Comment, Keyword, Name, Number, Punctuation, \
-    String, Text
-
-__all__ = ['RideLexer']
-
-
-class RideLexer(RegexLexer):
-    """
-    For `Ride `_
-    source code.
-
-    .. versionadded:: 2.6
-    """
-
-    name = 'Ride'
-    aliases = ['ride']
-    filenames = ['*.ride']
-    mimetypes = ['text/x-ride']
-
-    validName = r'[a-zA-Z_][a-zA-Z0-9_\']*'
-
-    builtinOps = (
-        '||', '|', '>=', '>', '==', '!',
-        '=', '<=', '<', '::', ':+', ':', '!=', '/',
-        '.', '=>', '-', '+', '*', '&&', '%', '++',
-    )
-
-    globalVariablesName = (
-        'NOALG', 'MD5', 'SHA1', 'SHA224', 'SHA256', 'SHA384', 'SHA512',
-        'SHA3224', 'SHA3256', 'SHA3384', 'SHA3512', 'nil', 'this', 'unit',
-        'height', 'lastBlock', 'Buy', 'Sell', 'CEILING', 'FLOOR', 'DOWN',
-        'HALFDOWN', 'HALFEVEN', 'HALFUP', 'UP',
-    )
-
-    typesName = (
-        'Unit', 'Int', 'Boolean', 'ByteVector', 'String', 'Address', 'Alias',
-        'Transfer', 'AssetPair', 'DataEntry', 'Order', 'Transaction',
-        'GenesisTransaction', 'PaymentTransaction', 'ReissueTransaction',
-        'BurnTransaction', 'MassTransferTransaction', 'ExchangeTransaction',
-        'TransferTransaction', 'SetAssetScriptTransaction',
-        'InvokeScriptTransaction', 'IssueTransaction', 'LeaseTransaction',
-        'LeaseCancelTransaction', 'CreateAliasTransaction',
-        'SetScriptTransaction', 'SponsorFeeTransaction', 'DataTransaction',
-        'WriteSet', 'AttachedPayment', 'ScriptTransfer', 'TransferSet',
-        'ScriptResult', 'Invocation', 'Asset', 'BlockInfo', 'Issue', 'Reissue',
-        'Burn', 'NoAlg', 'Md5', 'Sha1', 'Sha224', 'Sha256', 'Sha384', 'Sha512',
-        'Sha3224', 'Sha3256', 'Sha3384', 'Sha3512', 'BinaryEntry',
-        'BooleanEntry', 'IntegerEntry', 'StringEntry', 'List', 'Ceiling',
-        'Down', 'Floor', 'HalfDown', 'HalfEven', 'HalfUp', 'Up',
-    )
-
-    functionsName = (
-        'fraction', 'size', 'toBytes', 'take', 'drop', 'takeRight', 'dropRight',
-        'toString', 'isDefined', 'extract', 'throw', 'getElement', 'value',
-        'cons', 'toUtf8String', 'toInt', 'indexOf', 'lastIndexOf', 'split',
-        'parseInt', 'parseIntValue', 'keccak256', 'blake2b256', 'sha256',
-        'sigVerify', 'toBase58String', 'fromBase58String', 'toBase64String',
-        'fromBase64String', 'transactionById', 'transactionHeightById',
-        'getInteger', 'getBoolean', 'getBinary', 'getString',
-        'addressFromPublicKey', 'addressFromString', 'addressFromRecipient',
-        'assetBalance', 'wavesBalance', 'getIntegerValue', 'getBooleanValue',
-        'getBinaryValue', 'getStringValue', 'addressFromStringValue',
-        'assetInfo', 'rsaVerify', 'checkMerkleProof', 'median',
-        'valueOrElse', 'valueOrErrorMessage', 'contains', 'log', 'pow',
-        'toBase16String', 'fromBase16String', 'blockInfoByHeight',
-        'transferTransactionById',
-    )
-
-    reservedWords = words((
-        'match', 'case', 'else', 'func', 'if',
-        'let', 'then', '@Callable', '@Verifier',
-    ), suffix=r'\b')
-
-    tokens = {
-        'root': [
-            # Comments
-            (r'#.*', Comment.Single),
-            # Whitespace
-            (r'\s+', Text),
-            # Strings
-            (r'"', String, 'doublequote'),
-            (r'utf8\'', String, 'utf8quote'),
-            (r'base(58|64|16)\'', String, 'singlequote'),
-            # Keywords
-            (reservedWords, Keyword.Reserved),
-            (r'\{-#.*?#-\}', Keyword.Reserved),
-            (r'FOLD<\d+>', Keyword.Reserved),
-            # Types
-            (words(typesName), Keyword.Type),
-            # Main
-            # (specialName, Keyword.Reserved),
-            # Prefix Operators
-            (words(builtinOps, prefix=r'\(', suffix=r'\)'), Name.Function),
-            # Infix Operators
-            (words(builtinOps), Name.Function),
-            (words(globalVariablesName), Name.Function),
-            (words(functionsName), Name.Function),
-            # Numbers
-            include('numbers'),
-            # Variable Names
-            (validName, Name.Variable),
-            # Parens
-            (r'[,()\[\]{}]', Punctuation),
-        ],
-
-        'doublequote': [
-            (r'\\u[0-9a-fA-F]{4}', String.Escape),
-            (r'\\[nrfvb\\"]', String.Escape),
-            (r'[^"]', String),
-            (r'"', String, '#pop'),
-        ],
-
-        'utf8quote': [
-            (r'\\u[0-9a-fA-F]{4}', String.Escape),
-            (r'\\[nrfvb\\\']', String.Escape),
-            (r'[^\']', String),
-            (r'\'', String, '#pop'),
-        ],
-
-        'singlequote': [
-            (r'[^\']', String),
-            (r'\'', String, '#pop'),
-        ],
-
-        'numbers': [
-            (r'_?\d+', Number.Integer),
-        ],
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/rita.py b/venv/lib/python3.11/site-packages/pygments/lexers/rita.py
deleted file mode 100644
index 9aa8569..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/rita.py
+++ /dev/null
@@ -1,43 +0,0 @@
-"""
-    pygments.lexers.rita
-    ~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for RITA language
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer
-from pygments.token import Comment, Operator, Keyword, Name, Literal, \
-    Punctuation, Whitespace
-
-__all__ = ['RitaLexer']
-
-
-class RitaLexer(RegexLexer):
-    """
-    Lexer for RITA.
-
-    .. versionadded:: 2.11
-    """
-    name = 'Rita'
-    url = 'https://github.com/zaibacu/rita-dsl'
-    filenames = ['*.rita']
-    aliases = ['rita']
-    mimetypes = ['text/rita']
-
-    tokens = {
-        'root': [
-            (r'\n', Whitespace),
-            (r'\s+', Whitespace),
-            (r'#(.*?)\n', Comment.Single),
-            (r'@(.*?)\n', Operator),  # Yes, whole line as an operator
-            (r'"(\w|\d|\s|(\\")|[\'_\-./,\?\!])+?"', Literal),
-            (r'\'(\w|\d|\s|(\\\')|["_\-./,\?\!])+?\'', Literal),
-            (r'([A-Z_]+)', Keyword),
-            (r'([a-z0-9_]+)', Name),
-            (r'((->)|[!?+*|=])', Operator),
-            (r'[\(\),\{\}]', Punctuation)
-        ]
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/rnc.py b/venv/lib/python3.11/site-packages/pygments/lexers/rnc.py
deleted file mode 100644
index d717175..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/rnc.py
+++ /dev/null
@@ -1,67 +0,0 @@
-"""
-    pygments.lexers.rnc
-    ~~~~~~~~~~~~~~~~~~~
-
-    Lexer for Relax-NG Compact syntax
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Punctuation
-
-__all__ = ['RNCCompactLexer']
-
-
-class RNCCompactLexer(RegexLexer):
-    """
-    For RelaxNG-compact syntax.
-
-    .. versionadded:: 2.2
-    """
-
-    name = 'Relax-NG Compact'
-    url = 'http://relaxng.org'
-    aliases = ['rng-compact', 'rnc']
-    filenames = ['*.rnc']
-
-    tokens = {
-        'root': [
-            (r'namespace\b', Keyword.Namespace),
-            (r'(?:default|datatypes)\b', Keyword.Declaration),
-            (r'##.*$', Comment.Preproc),
-            (r'#.*$', Comment.Single),
-            (r'"[^"]*"', String.Double),
-            # TODO single quoted strings and escape sequences outside of
-            # double-quoted strings
-            (r'(?:element|attribute|mixed)\b', Keyword.Declaration, 'variable'),
-            (r'(text\b|xsd:[^ ]+)', Keyword.Type, 'maybe_xsdattributes'),
-            (r'[,?&*=|~]|>>', Operator),
-            (r'[(){}]', Punctuation),
-            (r'.', Text),
-        ],
-
-        # a variable has been declared using `element` or `attribute`
-        'variable': [
-            (r'[^{]+', Name.Variable),
-            (r'\{', Punctuation, '#pop'),
-        ],
-
-        # after an xsd: declaration there may be attributes
-        'maybe_xsdattributes': [
-            (r'\{', Punctuation, 'xsdattributes'),
-            (r'\}', Punctuation, '#pop'),
-            (r'.', Text),
-        ],
-
-        # attributes take the form { key1 = value1 key2 = value2 ... }
-        'xsdattributes': [
-            (r'[^ =}]', Name.Attribute),
-            (r'=', Operator),
-            (r'"[^"]*"', String.Double),
-            (r'\}', Punctuation, '#pop'),
-            (r'.', Text),
-        ],
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/roboconf.py b/venv/lib/python3.11/site-packages/pygments/lexers/roboconf.py
deleted file mode 100644
index 5d7d76e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/roboconf.py
+++ /dev/null
@@ -1,81 +0,0 @@
-"""
-    pygments.lexers.roboconf
-    ~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for Roboconf DSL.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words, re
-from pygments.token import Text, Operator, Keyword, Name, Comment
-
-__all__ = ['RoboconfGraphLexer', 'RoboconfInstancesLexer']
-
-
-class RoboconfGraphLexer(RegexLexer):
-    """
-    Lexer for Roboconf graph files.
-
-    .. versionadded:: 2.1
-    """
-    name = 'Roboconf Graph'
-    aliases = ['roboconf-graph']
-    filenames = ['*.graph']
-
-    flags = re.IGNORECASE | re.MULTILINE
-    tokens = {
-        'root': [
-            # Skip white spaces
-            (r'\s+', Text),
-
-            # There is one operator
-            (r'=', Operator),
-
-            # Keywords
-            (words(('facet', 'import'), suffix=r'\s*\b', prefix=r'\b'), Keyword),
-            (words((
-                'installer', 'extends', 'exports', 'imports', 'facets',
-                'children'), suffix=r'\s*:?', prefix=r'\b'), Name),
-
-            # Comments
-            (r'#.*\n', Comment),
-
-            # Default
-            (r'[^#]', Text),
-            (r'.*\n', Text)
-        ]
-    }
-
-
-class RoboconfInstancesLexer(RegexLexer):
-    """
-    Lexer for Roboconf instances files.
-
-    .. versionadded:: 2.1
-    """
-    name = 'Roboconf Instances'
-    aliases = ['roboconf-instances']
-    filenames = ['*.instances']
-
-    flags = re.IGNORECASE | re.MULTILINE
-    tokens = {
-        'root': [
-
-            # Skip white spaces
-            (r'\s+', Text),
-
-            # Keywords
-            (words(('instance of', 'import'), suffix=r'\s*\b', prefix=r'\b'), Keyword),
-            (words(('name', 'count'), suffix=r's*:?', prefix=r'\b'), Name),
-            (r'\s*[\w.-]+\s*:', Name),
-
-            # Comments
-            (r'#.*\n', Comment),
-
-            # Default
-            (r'[^#]', Text),
-            (r'.*\n', Text)
-        ]
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/robotframework.py b/venv/lib/python3.11/site-packages/pygments/lexers/robotframework.py
deleted file mode 100644
index 3b676cc..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/robotframework.py
+++ /dev/null
@@ -1,552 +0,0 @@
-"""
-    pygments.lexers.robotframework
-    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexer for Robot Framework.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-#  Copyright 2012 Nokia Siemens Networks Oyj
-#
-#  Licensed under the Apache License, Version 2.0 (the "License");
-#  you may not use this file except in compliance with the License.
-#  You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-#  Unless required by applicable law or agreed to in writing, software
-#  distributed under the License is distributed on an "AS IS" BASIS,
-#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-#  See the License for the specific language governing permissions and
-#  limitations under the License.
-
-import re
-
-from pygments.lexer import Lexer
-from pygments.token import Token
-
-__all__ = ['RobotFrameworkLexer']
-
-
-HEADING = Token.Generic.Heading
-SETTING = Token.Keyword.Namespace
-IMPORT = Token.Name.Namespace
-TC_KW_NAME = Token.Generic.Subheading
-KEYWORD = Token.Name.Function
-ARGUMENT = Token.String
-VARIABLE = Token.Name.Variable
-COMMENT = Token.Comment
-SEPARATOR = Token.Punctuation
-SYNTAX = Token.Punctuation
-GHERKIN = Token.Generic.Emph
-ERROR = Token.Error
-
-
-def normalize(string, remove=''):
-    string = string.lower()
-    for char in remove + ' ':
-        if char in string:
-            string = string.replace(char, '')
-    return string
-
-
-class RobotFrameworkLexer(Lexer):
-    """
-    For Robot Framework test data.
-
-    Supports both space and pipe separated plain text formats.
-
-    .. versionadded:: 1.6
-    """
-    name = 'RobotFramework'
-    url = 'http://robotframework.org'
-    aliases = ['robotframework']
-    filenames = ['*.robot', '*.resource']
-    mimetypes = ['text/x-robotframework']
-
-    def __init__(self, **options):
-        options['tabsize'] = 2
-        options['encoding'] = 'UTF-8'
-        Lexer.__init__(self, **options)
-
-    def get_tokens_unprocessed(self, text):
-        row_tokenizer = RowTokenizer()
-        var_tokenizer = VariableTokenizer()
-        index = 0
-        for row in text.splitlines():
-            for value, token in row_tokenizer.tokenize(row):
-                for value, token in var_tokenizer.tokenize(value, token):
-                    if value:
-                        yield index, token, str(value)
-                        index += len(value)
-
-
-class VariableTokenizer:
-
-    def tokenize(self, string, token):
-        var = VariableSplitter(string, identifiers='$@%&')
-        if var.start < 0 or token in (COMMENT, ERROR):
-            yield string, token
-            return
-        for value, token in self._tokenize(var, string, token):
-            if value:
-                yield value, token
-
-    def _tokenize(self, var, string, orig_token):
-        before = string[:var.start]
-        yield before, orig_token
-        yield var.identifier + '{', SYNTAX
-        yield from self.tokenize(var.base, VARIABLE)
-        yield '}', SYNTAX
-        if var.index is not None:
-            yield '[', SYNTAX
-            yield from self.tokenize(var.index, VARIABLE)
-            yield ']', SYNTAX
-        yield from self.tokenize(string[var.end:], orig_token)
-
-
-class RowTokenizer:
-
-    def __init__(self):
-        self._table = UnknownTable()
-        self._splitter = RowSplitter()
-        testcases = TestCaseTable()
-        settings = SettingTable(testcases.set_default_template)
-        variables = VariableTable()
-        keywords = KeywordTable()
-        self._tables = {'settings': settings, 'setting': settings,
-                        'metadata': settings,
-                        'variables': variables, 'variable': variables,
-                        'testcases': testcases, 'testcase': testcases,
-                        'tasks': testcases, 'task': testcases,
-                        'keywords': keywords, 'keyword': keywords,
-                        'userkeywords': keywords, 'userkeyword': keywords}
-
-    def tokenize(self, row):
-        commented = False
-        heading = False
-        for index, value in enumerate(self._splitter.split(row)):
-            # First value, and every second after that, is a separator.
-            index, separator = divmod(index-1, 2)
-            if value.startswith('#'):
-                commented = True
-            elif index == 0 and value.startswith('*'):
-                self._table = self._start_table(value)
-                heading = True
-            yield from self._tokenize(value, index, commented,
-                                      separator, heading)
-        self._table.end_row()
-
-    def _start_table(self, header):
-        name = normalize(header, remove='*')
-        return self._tables.get(name, UnknownTable())
-
-    def _tokenize(self, value, index, commented, separator, heading):
-        if commented:
-            yield value, COMMENT
-        elif separator:
-            yield value, SEPARATOR
-        elif heading:
-            yield value, HEADING
-        else:
-            yield from self._table.tokenize(value, index)
-
-
-class RowSplitter:
-    _space_splitter = re.compile('( {2,})')
-    _pipe_splitter = re.compile(r'((?:^| +)\|(?: +|$))')
-
-    def split(self, row):
-        splitter = (row.startswith('| ') and self._split_from_pipes
-                    or self._split_from_spaces)
-        yield from splitter(row)
-        yield '\n'
-
-    def _split_from_spaces(self, row):
-        yield ''  # Start with (pseudo)separator similarly as with pipes
-        yield from self._space_splitter.split(row)
-
-    def _split_from_pipes(self, row):
-        _, separator, rest = self._pipe_splitter.split(row, 1)
-        yield separator
-        while self._pipe_splitter.search(rest):
-            cell, separator, rest = self._pipe_splitter.split(rest, 1)
-            yield cell
-            yield separator
-        yield rest
-
-
-class Tokenizer:
-    _tokens = None
-
-    def __init__(self):
-        self._index = 0
-
-    def tokenize(self, value):
-        values_and_tokens = self._tokenize(value, self._index)
-        self._index += 1
-        if isinstance(values_and_tokens, type(Token)):
-            values_and_tokens = [(value, values_and_tokens)]
-        return values_and_tokens
-
-    def _tokenize(self, value, index):
-        index = min(index, len(self._tokens) - 1)
-        return self._tokens[index]
-
-    def _is_assign(self, value):
-        if value.endswith('='):
-            value = value[:-1].strip()
-        var = VariableSplitter(value, identifiers='$@&')
-        return var.start == 0 and var.end == len(value)
-
-
-class Comment(Tokenizer):
-    _tokens = (COMMENT,)
-
-
-class Setting(Tokenizer):
-    _tokens = (SETTING, ARGUMENT)
-    _keyword_settings = ('suitesetup', 'suiteprecondition', 'suiteteardown',
-                         'suitepostcondition', 'testsetup', 'tasksetup', 'testprecondition',
-                         'testteardown','taskteardown', 'testpostcondition', 'testtemplate', 'tasktemplate')
-    _import_settings = ('library', 'resource', 'variables')
-    _other_settings = ('documentation', 'metadata', 'forcetags', 'defaulttags',
-                       'testtimeout','tasktimeout')
-    _custom_tokenizer = None
-
-    def __init__(self, template_setter=None):
-        Tokenizer.__init__(self)
-        self._template_setter = template_setter
-
-    def _tokenize(self, value, index):
-        if index == 1 and self._template_setter:
-            self._template_setter(value)
-        if index == 0:
-            normalized = normalize(value)
-            if normalized in self._keyword_settings:
-                self._custom_tokenizer = KeywordCall(support_assign=False)
-            elif normalized in self._import_settings:
-                self._custom_tokenizer = ImportSetting()
-            elif normalized not in self._other_settings:
-                return ERROR
-        elif self._custom_tokenizer:
-            return self._custom_tokenizer.tokenize(value)
-        return Tokenizer._tokenize(self, value, index)
-
-
-class ImportSetting(Tokenizer):
-    _tokens = (IMPORT, ARGUMENT)
-
-
-class TestCaseSetting(Setting):
-    _keyword_settings = ('setup', 'precondition', 'teardown', 'postcondition',
-                         'template')
-    _import_settings = ()
-    _other_settings = ('documentation', 'tags', 'timeout')
-
-    def _tokenize(self, value, index):
-        if index == 0:
-            type = Setting._tokenize(self, value[1:-1], index)
-            return [('[', SYNTAX), (value[1:-1], type), (']', SYNTAX)]
-        return Setting._tokenize(self, value, index)
-
-
-class KeywordSetting(TestCaseSetting):
-    _keyword_settings = ('teardown',)
-    _other_settings = ('documentation', 'arguments', 'return', 'timeout', 'tags')
-
-
-class Variable(Tokenizer):
-    _tokens = (SYNTAX, ARGUMENT)
-
-    def _tokenize(self, value, index):
-        if index == 0 and not self._is_assign(value):
-            return ERROR
-        return Tokenizer._tokenize(self, value, index)
-
-
-class KeywordCall(Tokenizer):
-    _tokens = (KEYWORD, ARGUMENT)
-
-    def __init__(self, support_assign=True):
-        Tokenizer.__init__(self)
-        self._keyword_found = not support_assign
-        self._assigns = 0
-
-    def _tokenize(self, value, index):
-        if not self._keyword_found and self._is_assign(value):
-            self._assigns += 1
-            return SYNTAX  # VariableTokenizer tokenizes this later.
-        if self._keyword_found:
-            return Tokenizer._tokenize(self, value, index - self._assigns)
-        self._keyword_found = True
-        return GherkinTokenizer().tokenize(value, KEYWORD)
-
-
-class GherkinTokenizer:
-    _gherkin_prefix = re.compile('^(Given|When|Then|And|But) ', re.IGNORECASE)
-
-    def tokenize(self, value, token):
-        match = self._gherkin_prefix.match(value)
-        if not match:
-            return [(value, token)]
-        end = match.end()
-        return [(value[:end], GHERKIN), (value[end:], token)]
-
-
-class TemplatedKeywordCall(Tokenizer):
-    _tokens = (ARGUMENT,)
-
-
-class ForLoop(Tokenizer):
-
-    def __init__(self):
-        Tokenizer.__init__(self)
-        self._in_arguments = False
-
-    def _tokenize(self, value, index):
-        token = self._in_arguments and ARGUMENT or SYNTAX
-        if value.upper() in ('IN', 'IN RANGE'):
-            self._in_arguments = True
-        return token
-
-
-class _Table:
-    _tokenizer_class = None
-
-    def __init__(self, prev_tokenizer=None):
-        self._tokenizer = self._tokenizer_class()
-        self._prev_tokenizer = prev_tokenizer
-        self._prev_values_on_row = []
-
-    def tokenize(self, value, index):
-        if self._continues(value, index):
-            self._tokenizer = self._prev_tokenizer
-            yield value, SYNTAX
-        else:
-            yield from self._tokenize(value, index)
-        self._prev_values_on_row.append(value)
-
-    def _continues(self, value, index):
-        return value == '...' and all(self._is_empty(t)
-                                      for t in self._prev_values_on_row)
-
-    def _is_empty(self, value):
-        return value in ('', '\\')
-
-    def _tokenize(self, value, index):
-        return self._tokenizer.tokenize(value)
-
-    def end_row(self):
-        self.__init__(prev_tokenizer=self._tokenizer)
-
-
-class UnknownTable(_Table):
-    _tokenizer_class = Comment
-
-    def _continues(self, value, index):
-        return False
-
-
-class VariableTable(_Table):
-    _tokenizer_class = Variable
-
-
-class SettingTable(_Table):
-    _tokenizer_class = Setting
-
-    def __init__(self, template_setter, prev_tokenizer=None):
-        _Table.__init__(self, prev_tokenizer)
-        self._template_setter = template_setter
-
-    def _tokenize(self, value, index):
-        if index == 0 and normalize(value) == 'testtemplate':
-            self._tokenizer = Setting(self._template_setter)
-        return _Table._tokenize(self, value, index)
-
-    def end_row(self):
-        self.__init__(self._template_setter, prev_tokenizer=self._tokenizer)
-
-
-class TestCaseTable(_Table):
-    _setting_class = TestCaseSetting
-    _test_template = None
-    _default_template = None
-
-    @property
-    def _tokenizer_class(self):
-        if self._test_template or (self._default_template and
-                                   self._test_template is not False):
-            return TemplatedKeywordCall
-        return KeywordCall
-
-    def _continues(self, value, index):
-        return index > 0 and _Table._continues(self, value, index)
-
-    def _tokenize(self, value, index):
-        if index == 0:
-            if value:
-                self._test_template = None
-            return GherkinTokenizer().tokenize(value, TC_KW_NAME)
-        if index == 1 and self._is_setting(value):
-            if self._is_template(value):
-                self._test_template = False
-                self._tokenizer = self._setting_class(self.set_test_template)
-            else:
-                self._tokenizer = self._setting_class()
-        if index == 1 and self._is_for_loop(value):
-            self._tokenizer = ForLoop()
-        if index == 1 and self._is_empty(value):
-            return [(value, SYNTAX)]
-        return _Table._tokenize(self, value, index)
-
-    def _is_setting(self, value):
-        return value.startswith('[') and value.endswith(']')
-
-    def _is_template(self, value):
-        return normalize(value) == '[template]'
-
-    def _is_for_loop(self, value):
-        return value.startswith(':') and normalize(value, remove=':') == 'for'
-
-    def set_test_template(self, template):
-        self._test_template = self._is_template_set(template)
-
-    def set_default_template(self, template):
-        self._default_template = self._is_template_set(template)
-
-    def _is_template_set(self, template):
-        return normalize(template) not in ('', '\\', 'none', '${empty}')
-
-
-class KeywordTable(TestCaseTable):
-    _tokenizer_class = KeywordCall
-    _setting_class = KeywordSetting
-
-    def _is_template(self, value):
-        return False
-
-
-# Following code copied directly from Robot Framework 2.7.5.
-
-class VariableSplitter:
-
-    def __init__(self, string, identifiers):
-        self.identifier = None
-        self.base = None
-        self.index = None
-        self.start = -1
-        self.end = -1
-        self._identifiers = identifiers
-        self._may_have_internal_variables = False
-        try:
-            self._split(string)
-        except ValueError:
-            pass
-        else:
-            self._finalize()
-
-    def get_replaced_base(self, variables):
-        if self._may_have_internal_variables:
-            return variables.replace_string(self.base)
-        return self.base
-
-    def _finalize(self):
-        self.identifier = self._variable_chars[0]
-        self.base = ''.join(self._variable_chars[2:-1])
-        self.end = self.start + len(self._variable_chars)
-        if self._has_list_or_dict_variable_index():
-            self.index = ''.join(self._list_and_dict_variable_index_chars[1:-1])
-            self.end += len(self._list_and_dict_variable_index_chars)
-
-    def _has_list_or_dict_variable_index(self):
-        return self._list_and_dict_variable_index_chars\
-        and self._list_and_dict_variable_index_chars[-1] == ']'
-
-    def _split(self, string):
-        start_index, max_index = self._find_variable(string)
-        self.start = start_index
-        self._open_curly = 1
-        self._state = self._variable_state
-        self._variable_chars = [string[start_index], '{']
-        self._list_and_dict_variable_index_chars = []
-        self._string = string
-        start_index += 2
-        for index, char in enumerate(string[start_index:]):
-            index += start_index  # Giving start to enumerate only in Py 2.6+
-            try:
-                self._state(char, index)
-            except StopIteration:
-                return
-            if index  == max_index and not self._scanning_list_variable_index():
-                return
-
-    def _scanning_list_variable_index(self):
-        return self._state in [self._waiting_list_variable_index_state,
-                               self._list_variable_index_state]
-
-    def _find_variable(self, string):
-        max_end_index = string.rfind('}')
-        if max_end_index == -1:
-            raise ValueError('No variable end found')
-        if self._is_escaped(string, max_end_index):
-            return self._find_variable(string[:max_end_index])
-        start_index = self._find_start_index(string, 1, max_end_index)
-        if start_index == -1:
-            raise ValueError('No variable start found')
-        return start_index, max_end_index
-
-    def _find_start_index(self, string, start, end):
-        index = string.find('{', start, end) - 1
-        if index < 0:
-            return -1
-        if self._start_index_is_ok(string, index):
-            return index
-        return self._find_start_index(string, index+2, end)
-
-    def _start_index_is_ok(self, string, index):
-        return string[index] in self._identifiers\
-        and not self._is_escaped(string, index)
-
-    def _is_escaped(self, string, index):
-        escaped = False
-        while index > 0 and string[index-1] == '\\':
-            index -= 1
-            escaped = not escaped
-        return escaped
-
-    def _variable_state(self, char, index):
-        self._variable_chars.append(char)
-        if char == '}' and not self._is_escaped(self._string, index):
-            self._open_curly -= 1
-            if self._open_curly == 0:
-                if not self._is_list_or_dict_variable():
-                    raise StopIteration
-                self._state = self._waiting_list_variable_index_state
-        elif char in self._identifiers:
-            self._state = self._internal_variable_start_state
-
-    def _is_list_or_dict_variable(self):
-        return self._variable_chars[0] in ('@','&')
-
-    def _internal_variable_start_state(self, char, index):
-        self._state = self._variable_state
-        if char == '{':
-            self._variable_chars.append(char)
-            self._open_curly += 1
-            self._may_have_internal_variables = True
-        else:
-            self._variable_state(char, index)
-
-    def _waiting_list_variable_index_state(self, char, index):
-        if char != '[':
-            raise StopIteration
-        self._list_and_dict_variable_index_chars.append(char)
-        self._state = self._list_variable_index_state
-
-    def _list_variable_index_state(self, char, index):
-        self._list_and_dict_variable_index_chars.append(char)
-        if char == ']':
-            raise StopIteration
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/ruby.py b/venv/lib/python3.11/site-packages/pygments/lexers/ruby.py
deleted file mode 100644
index 466d6e7..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/ruby.py
+++ /dev/null
@@ -1,516 +0,0 @@
-"""
-    pygments.lexers.ruby
-    ~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for Ruby and related languages.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, ExtendedRegexLexer, include, \
-    bygroups, default, LexerContext, do_insertions, words, line_re
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation, Error, Generic, Whitespace
-from pygments.util import shebang_matches
-
-__all__ = ['RubyLexer', 'RubyConsoleLexer', 'FancyLexer']
-
-
-RUBY_OPERATORS = (
-    '*', '**', '-', '+', '-@', '+@', '/', '%', '&', '|', '^', '`', '~',
-    '[]', '[]=', '<<', '>>', '<', '<>', '<=>', '>', '>=', '==', '==='
-)
-
-
-class RubyLexer(ExtendedRegexLexer):
-    """
-    For Ruby source code.
-    """
-
-    name = 'Ruby'
-    url = 'http://www.ruby-lang.org'
-    aliases = ['ruby', 'rb', 'duby']
-    filenames = ['*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec',
-                 '*.rbx', '*.duby', 'Gemfile', 'Vagrantfile']
-    mimetypes = ['text/x-ruby', 'application/x-ruby']
-
-    flags = re.DOTALL | re.MULTILINE
-
-    def heredoc_callback(self, match, ctx):
-        # okay, this is the hardest part of parsing Ruby...
-        # match: 1 = <<[-~]?, 2 = quote? 3 = name 4 = quote? 5 = rest of line
-
-        start = match.start(1)
-        yield start, Operator, match.group(1)        # <<[-~]?
-        yield match.start(2), String.Heredoc, match.group(2)   # quote ", ', `
-        yield match.start(3), String.Delimiter, match.group(3) # heredoc name
-        yield match.start(4), String.Heredoc, match.group(4)   # quote again
-
-        heredocstack = ctx.__dict__.setdefault('heredocstack', [])
-        outermost = not bool(heredocstack)
-        heredocstack.append((match.group(1) in ('<<-', '<<~'), match.group(3)))
-
-        ctx.pos = match.start(5)
-        ctx.end = match.end(5)
-        # this may find other heredocs, so limit the recursion depth
-        if len(heredocstack) < 100:
-            yield from self.get_tokens_unprocessed(context=ctx)
-        else:
-            yield ctx.pos, String.Heredoc, match.group(5)
-        ctx.pos = match.end()
-
-        if outermost:
-            # this is the outer heredoc again, now we can process them all
-            for tolerant, hdname in heredocstack:
-                lines = []
-                for match in line_re.finditer(ctx.text, ctx.pos):
-                    if tolerant:
-                        check = match.group().strip()
-                    else:
-                        check = match.group().rstrip()
-                    if check == hdname:
-                        for amatch in lines:
-                            yield amatch.start(), String.Heredoc, amatch.group()
-                        yield match.start(), String.Delimiter, match.group()
-                        ctx.pos = match.end()
-                        break
-                    else:
-                        lines.append(match)
-                else:
-                    # end of heredoc not found -- error!
-                    for amatch in lines:
-                        yield amatch.start(), Error, amatch.group()
-            ctx.end = len(ctx.text)
-            del heredocstack[:]
-
-    def gen_rubystrings_rules():
-        def intp_regex_callback(self, match, ctx):
-            yield match.start(1), String.Regex, match.group(1)  # begin
-            nctx = LexerContext(match.group(3), 0, ['interpolated-regex'])
-            for i, t, v in self.get_tokens_unprocessed(context=nctx):
-                yield match.start(3)+i, t, v
-            yield match.start(4), String.Regex, match.group(4)  # end[mixounse]*
-            ctx.pos = match.end()
-
-        def intp_string_callback(self, match, ctx):
-            yield match.start(1), String.Other, match.group(1)
-            nctx = LexerContext(match.group(3), 0, ['interpolated-string'])
-            for i, t, v in self.get_tokens_unprocessed(context=nctx):
-                yield match.start(3)+i, t, v
-            yield match.start(4), String.Other, match.group(4)  # end
-            ctx.pos = match.end()
-
-        states = {}
-        states['strings'] = [
-            # easy ones
-            (r'\:@{0,2}[a-zA-Z_]\w*[!?]?', String.Symbol),
-            (words(RUBY_OPERATORS, prefix=r'\:@{0,2}'), String.Symbol),
-            (r":'(\\\\|\\[^\\]|[^'\\])*'", String.Symbol),
-            (r':"', String.Symbol, 'simple-sym'),
-            (r'([a-zA-Z_]\w*)(:)(?!:)',
-             bygroups(String.Symbol, Punctuation)),  # Since Ruby 1.9
-            (r'"', String.Double, 'simple-string-double'),
-            (r"'", String.Single, 'simple-string-single'),
-            (r'(?', '<>', 'ab'):
-            states[name+'-intp-string'] = [
-                (r'\\[\\' + bracecc + ']', String.Other),
-                (lbrace, String.Other, '#push'),
-                (rbrace, String.Other, '#pop'),
-                include('string-intp-escaped'),
-                (r'[\\#' + bracecc + ']', String.Other),
-                (r'[^\\#' + bracecc + ']+', String.Other),
-            ]
-            states['strings'].append((r'%[QWx]?' + lbrace, String.Other,
-                                      name+'-intp-string'))
-            states[name+'-string'] = [
-                (r'\\[\\' + bracecc + ']', String.Other),
-                (lbrace, String.Other, '#push'),
-                (rbrace, String.Other, '#pop'),
-                (r'[\\#' + bracecc + ']', String.Other),
-                (r'[^\\#' + bracecc + ']+', String.Other),
-            ]
-            states['strings'].append((r'%[qsw]' + lbrace, String.Other,
-                                      name+'-string'))
-            states[name+'-regex'] = [
-                (r'\\[\\' + bracecc + ']', String.Regex),
-                (lbrace, String.Regex, '#push'),
-                (rbrace + '[mixounse]*', String.Regex, '#pop'),
-                include('string-intp'),
-                (r'[\\#' + bracecc + ']', String.Regex),
-                (r'[^\\#' + bracecc + ']+', String.Regex),
-            ]
-            states['strings'].append((r'%r' + lbrace, String.Regex,
-                                      name+'-regex'))
-
-        # these must come after %!
-        states['strings'] += [
-            # %r regex
-            (r'(%r([\W_]))((?:\\\2|(?!\2).)*)(\2[mixounse]*)',
-             intp_regex_callback),
-            # regular fancy strings with qsw
-            (r'%[qsw]([\W_])((?:\\\1|(?!\1).)*)\1', String.Other),
-            (r'(%[QWx]([\W_]))((?:\\\2|(?!\2).)*)(\2)',
-             intp_string_callback),
-            # special forms of fancy strings after operators or
-            # in method calls with braces
-            (r'(?<=[-+/*%=<>&!^|~,(])(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
-             bygroups(Whitespace, String.Other, None)),
-            # and because of fixed width lookbehinds the whole thing a
-            # second time for line startings...
-            (r'^(\s*)(%([\t ])(?:(?:\\\3|(?!\3).)*)\3)',
-             bygroups(Whitespace, String.Other, None)),
-            # all regular fancy strings without qsw
-            (r'(%([^a-zA-Z0-9\s]))((?:\\\2|(?!\2).)*)(\2)',
-             intp_string_callback),
-        ]
-
-        return states
-
-    tokens = {
-        'root': [
-            (r'\A#!.+?$', Comment.Hashbang),
-            (r'#.*?$', Comment.Single),
-            (r'=begin\s.*?\n=end.*?$', Comment.Multiline),
-            # keywords
-            (words((
-                'BEGIN', 'END', 'alias', 'begin', 'break', 'case', 'defined?',
-                'do', 'else', 'elsif', 'end', 'ensure', 'for', 'if', 'in', 'next', 'redo',
-                'rescue', 'raise', 'retry', 'return', 'super', 'then', 'undef',
-                'unless', 'until', 'when', 'while', 'yield'), suffix=r'\b'),
-             Keyword),
-            # start of function, class and module names
-            (r'(module)(\s+)([a-zA-Z_]\w*'
-             r'(?:::[a-zA-Z_]\w*)*)',
-             bygroups(Keyword, Whitespace, Name.Namespace)),
-            (r'(def)(\s+)', bygroups(Keyword, Whitespace), 'funcname'),
-            (r'def(?=[*%&^`~+-/\[<>=])', Keyword, 'funcname'),
-            (r'(class)(\s+)', bygroups(Keyword, Whitespace), 'classname'),
-            # special methods
-            (words((
-                'initialize', 'new', 'loop', 'include', 'extend', 'raise', 'attr_reader',
-                'attr_writer', 'attr_accessor', 'attr', 'catch', 'throw', 'private',
-                'module_function', 'public', 'protected', 'true', 'false', 'nil'),
-                suffix=r'\b'),
-             Keyword.Pseudo),
-            (r'(not|and|or)\b', Operator.Word),
-            (words((
-                'autoload', 'block_given', 'const_defined', 'eql', 'equal', 'frozen', 'include',
-                'instance_of', 'is_a', 'iterator', 'kind_of', 'method_defined', 'nil',
-                'private_method_defined', 'protected_method_defined',
-                'public_method_defined', 'respond_to', 'tainted'), suffix=r'\?'),
-             Name.Builtin),
-            (r'(chomp|chop|exit|gsub|sub)!', Name.Builtin),
-            (words((
-                'Array', 'Float', 'Integer', 'String', '__id__', '__send__', 'abort',
-                'ancestors', 'at_exit', 'autoload', 'binding', 'callcc', 'caller',
-                'catch', 'chomp', 'chop', 'class_eval', 'class_variables',
-                'clone', 'const_defined?', 'const_get', 'const_missing', 'const_set',
-                'constants', 'display', 'dup', 'eval', 'exec', 'exit', 'extend', 'fail', 'fork',
-                'format', 'freeze', 'getc', 'gets', 'global_variables', 'gsub',
-                'hash', 'id', 'included_modules', 'inspect', 'instance_eval',
-                'instance_method', 'instance_methods',
-                'instance_variable_get', 'instance_variable_set', 'instance_variables',
-                'lambda', 'load', 'local_variables', 'loop',
-                'method', 'method_missing', 'methods', 'module_eval', 'name',
-                'object_id', 'open', 'p', 'print', 'printf', 'private_class_method',
-                'private_instance_methods',
-                'private_methods', 'proc', 'protected_instance_methods',
-                'protected_methods', 'public_class_method',
-                'public_instance_methods', 'public_methods',
-                'putc', 'puts', 'raise', 'rand', 'readline', 'readlines', 'require',
-                'scan', 'select', 'self', 'send', 'set_trace_func', 'singleton_methods', 'sleep',
-                'split', 'sprintf', 'srand', 'sub', 'syscall', 'system', 'taint',
-                'test', 'throw', 'to_a', 'to_s', 'trace_var', 'trap', 'untaint',
-                'untrace_var', 'warn'), prefix=r'(?~!:])|'
-             r'(?<=(?:\s|;)when\s)|'
-             r'(?<=(?:\s|;)or\s)|'
-             r'(?<=(?:\s|;)and\s)|'
-             r'(?<=\.index\s)|'
-             r'(?<=\.scan\s)|'
-             r'(?<=\.sub\s)|'
-             r'(?<=\.sub!\s)|'
-             r'(?<=\.gsub\s)|'
-             r'(?<=\.gsub!\s)|'
-             r'(?<=\.match\s)|'
-             r'(?<=(?:\s|;)if\s)|'
-             r'(?<=(?:\s|;)elsif\s)|'
-             r'(?<=^when\s)|'
-             r'(?<=^index\s)|'
-             r'(?<=^scan\s)|'
-             r'(?<=^sub\s)|'
-             r'(?<=^gsub\s)|'
-             r'(?<=^sub!\s)|'
-             r'(?<=^gsub!\s)|'
-             r'(?<=^match\s)|'
-             r'(?<=^if\s)|'
-             r'(?<=^elsif\s)'
-             r')(\s*)(/)', bygroups(Text, String.Regex), 'multiline-regex'),
-            # multiline regex (in method calls or subscripts)
-            (r'(?<=\(|,|\[)/', String.Regex, 'multiline-regex'),
-            # multiline regex (this time the funny no whitespace rule)
-            (r'(\s+)(/)(?![\s=])', bygroups(Whitespace, String.Regex),
-             'multiline-regex'),
-            # lex numbers and ignore following regular expressions which
-            # are division operators in fact (grrrr. i hate that. any
-            # better ideas?)
-            # since pygments 0.7 we also eat a "?" operator after numbers
-            # so that the char operator does not work. Chars are not allowed
-            # there so that you can use the ternary operator.
-            # stupid example:
-            #   x>=0?n[x]:""
-            (r'(0_?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
-             bygroups(Number.Oct, Whitespace, Operator)),
-            (r'(0x[0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
-             bygroups(Number.Hex, Whitespace, Operator)),
-            (r'(0b[01]+(?:_[01]+)*)(\s*)([/?])?',
-             bygroups(Number.Bin, Whitespace, Operator)),
-            (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
-             bygroups(Number.Integer, Whitespace, Operator)),
-            # Names
-            (r'@@[a-zA-Z_]\w*', Name.Variable.Class),
-            (r'@[a-zA-Z_]\w*', Name.Variable.Instance),
-            (r'\$\w+', Name.Variable.Global),
-            (r'\$[!@&`\'+~=/\\,;.<>_*$?:"^-]', Name.Variable.Global),
-            (r'\$-[0adFiIlpvw]', Name.Variable.Global),
-            (r'::', Operator),
-            include('strings'),
-            # chars
-            (r'\?(\\[MC]-)*'  # modifiers
-             r'(\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})|\S)'
-             r'(?!\w)',
-             String.Char),
-            (r'[A-Z]\w+', Name.Constant),
-            # this is needed because ruby attributes can look
-            # like keywords (class) or like this: ` ?!?
-            (words(RUBY_OPERATORS, prefix=r'(\.|::)'),
-             bygroups(Operator, Name.Operator)),
-            (r'(\.|::)([a-zA-Z_]\w*[!?]?|[*%&^`~+\-/\[<>=])',
-             bygroups(Operator, Name)),
-            (r'[a-zA-Z_]\w*[!?]?', Name),
-            (r'(\[|\]|\*\*|<>?|>=|<=|<=>|=~|={3}|'
-             r'!~|&&?|\|\||\.{1,3})', Operator),
-            (r'[-+/*%=<>&!^|~]=?', Operator),
-            (r'[(){};,/?:\\]', Punctuation),
-            (r'\s+', Whitespace)
-        ],
-        'funcname': [
-            (r'\(', Punctuation, 'defexpr'),
-            (r'(?:([a-zA-Z_]\w*)(\.))?'  # optional scope name, like "self."
-             r'('
-                r'[a-zA-Z\u0080-\uffff][a-zA-Z0-9_\u0080-\uffff]*[!?=]?'  # method name
-                r'|!=|!~|=~|\*\*?|[-+!~]@?|[/%&|^]|<=>|<[<=]?|>[>=]?|===?'  # or operator override
-                r'|\[\]=?'  # or element reference/assignment override
-                r'|`'  # or the undocumented backtick override
-             r')',
-             bygroups(Name.Class, Operator, Name.Function), '#pop'),
-            default('#pop')
-        ],
-        'classname': [
-            (r'\(', Punctuation, 'defexpr'),
-            (r'<<', Operator, '#pop'),
-            (r'[A-Z_]\w*', Name.Class, '#pop'),
-            default('#pop')
-        ],
-        'defexpr': [
-            (r'(\))(\.|::)?', bygroups(Punctuation, Operator), '#pop'),
-            (r'\(', Operator, '#push'),
-            include('root')
-        ],
-        'in-intp': [
-            (r'\{', String.Interpol, '#push'),
-            (r'\}', String.Interpol, '#pop'),
-            include('root'),
-        ],
-        'string-intp': [
-            (r'#\{', String.Interpol, 'in-intp'),
-            (r'#@@?[a-zA-Z_]\w*', String.Interpol),
-            (r'#\$[a-zA-Z_]\w*', String.Interpol)
-        ],
-        'string-intp-escaped': [
-            include('string-intp'),
-            (r'\\([\\abefnrstv#"\']|x[a-fA-F0-9]{1,2}|[0-7]{1,3})',
-             String.Escape)
-        ],
-        'interpolated-regex': [
-            include('string-intp'),
-            (r'[\\#]', String.Regex),
-            (r'[^\\#]+', String.Regex),
-        ],
-        'interpolated-string': [
-            include('string-intp'),
-            (r'[\\#]', String.Other),
-            (r'[^\\#]+', String.Other),
-        ],
-        'multiline-regex': [
-            include('string-intp'),
-            (r'\\\\', String.Regex),
-            (r'\\/', String.Regex),
-            (r'[\\#]', String.Regex),
-            (r'[^\\/#]+', String.Regex),
-            (r'/[mixounse]*', String.Regex, '#pop'),
-        ],
-        'end-part': [
-            (r'.+', Comment.Preproc, '#pop')
-        ]
-    }
-    tokens.update(gen_rubystrings_rules())
-
-    def analyse_text(text):
-        return shebang_matches(text, r'ruby(1\.\d)?')
-
-
-class RubyConsoleLexer(Lexer):
-    """
-    For Ruby interactive console (**irb**) output.
-    """
-    name = 'Ruby irb session'
-    aliases = ['rbcon', 'irb']
-    mimetypes = ['text/x-ruby-shellsession']
-    _example = 'rbcon/console'
-
-    _prompt_re = re.compile(r'irb\([a-zA-Z_]\w*\):\d{3}:\d+[>*"\'] '
-                            r'|>> |\?> ')
-
-    def get_tokens_unprocessed(self, text):
-        rblexer = RubyLexer(**self.options)
-
-        curcode = ''
-        insertions = []
-        for match in line_re.finditer(text):
-            line = match.group()
-            m = self._prompt_re.match(line)
-            if m is not None:
-                end = m.end()
-                insertions.append((len(curcode),
-                                   [(0, Generic.Prompt, line[:end])]))
-                curcode += line[end:]
-            else:
-                if curcode:
-                    yield from do_insertions(
-                        insertions, rblexer.get_tokens_unprocessed(curcode))
-                    curcode = ''
-                    insertions = []
-                yield match.start(), Generic.Output, line
-        if curcode:
-            yield from do_insertions(
-                insertions, rblexer.get_tokens_unprocessed(curcode))
-
-
-class FancyLexer(RegexLexer):
-    """
-    Pygments Lexer For Fancy.
-
-    Fancy is a self-hosted, pure object-oriented, dynamic,
-    class-based, concurrent general-purpose programming language
-    running on Rubinius, the Ruby VM.
-
-    .. versionadded:: 1.5
-    """
-    name = 'Fancy'
-    url = 'https://github.com/bakkdoor/fancy'
-    filenames = ['*.fy', '*.fancypack']
-    aliases = ['fancy', 'fy']
-    mimetypes = ['text/x-fancysrc']
-
-    tokens = {
-        # copied from PerlLexer:
-        'balanced-regex': [
-            (r'/(\\\\|\\[^\\]|[^/\\])*/[egimosx]*', String.Regex, '#pop'),
-            (r'!(\\\\|\\[^\\]|[^!\\])*![egimosx]*', String.Regex, '#pop'),
-            (r'\\(\\\\|[^\\])*\\[egimosx]*', String.Regex, '#pop'),
-            (r'\{(\\\\|\\[^\\]|[^}\\])*\}[egimosx]*', String.Regex, '#pop'),
-            (r'<(\\\\|\\[^\\]|[^>\\])*>[egimosx]*', String.Regex, '#pop'),
-            (r'\[(\\\\|\\[^\\]|[^\]\\])*\][egimosx]*', String.Regex, '#pop'),
-            (r'\((\\\\|\\[^\\]|[^)\\])*\)[egimosx]*', String.Regex, '#pop'),
-            (r'@(\\\\|\\[^\\]|[^@\\])*@[egimosx]*', String.Regex, '#pop'),
-            (r'%(\\\\|\\[^\\]|[^%\\])*%[egimosx]*', String.Regex, '#pop'),
-            (r'\$(\\\\|\\[^\\]|[^$\\])*\$[egimosx]*', String.Regex, '#pop'),
-        ],
-        'root': [
-            (r'\s+', Whitespace),
-
-            # balanced delimiters (copied from PerlLexer):
-            (r's\{(\\\\|\\[^\\]|[^}\\])*\}\s*', String.Regex, 'balanced-regex'),
-            (r's<(\\\\|\\[^\\]|[^>\\])*>\s*', String.Regex, 'balanced-regex'),
-            (r's\[(\\\\|\\[^\\]|[^\]\\])*\]\s*', String.Regex, 'balanced-regex'),
-            (r's\((\\\\|\\[^\\]|[^)\\])*\)\s*', String.Regex, 'balanced-regex'),
-            (r'm?/(\\\\|\\[^\\]|[^///\n])*/[gcimosx]*', String.Regex),
-            (r'm(?=[/!\\{<\[(@%$])', String.Regex, 'balanced-regex'),
-
-            # Comments
-            (r'#(.*?)\n', Comment.Single),
-            # Symbols
-            (r'\'([^\'\s\[\](){}]+|\[\])', String.Symbol),
-            # Multi-line DoubleQuotedString
-            (r'"""(\\\\|\\[^\\]|[^\\])*?"""', String),
-            # DoubleQuotedString
-            (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
-            # keywords
-            (r'(def|class|try|catch|finally|retry|return|return_local|match|'
-             r'case|->|=>)\b', Keyword),
-            # constants
-            (r'(self|super|nil|false|true)\b', Name.Constant),
-            (r'[(){};,/?|:\\]', Punctuation),
-            # names
-            (words((
-                'Object', 'Array', 'Hash', 'Directory', 'File', 'Class', 'String',
-                'Number', 'Enumerable', 'FancyEnumerable', 'Block', 'TrueClass',
-                'NilClass', 'FalseClass', 'Tuple', 'Symbol', 'Stack', 'Set',
-                'FancySpec', 'Method', 'Package', 'Range'), suffix=r'\b'),
-             Name.Builtin),
-            # functions
-            (r'[a-zA-Z](\w|[-+?!=*/^><%])*:', Name.Function),
-            # operators, must be below functions
-            (r'[-+*/~,<>=&!?%^\[\].$]+', Operator),
-            (r'[A-Z]\w*', Name.Constant),
-            (r'@[a-zA-Z_]\w*', Name.Variable.Instance),
-            (r'@@[a-zA-Z_]\w*', Name.Variable.Class),
-            ('@@?', Operator),
-            (r'[a-zA-Z_]\w*', Name),
-            # numbers - / checks are necessary to avoid mismarking regexes,
-            # see comment in RubyLexer
-            (r'(0[oO]?[0-7]+(?:_[0-7]+)*)(\s*)([/?])?',
-             bygroups(Number.Oct, Whitespace, Operator)),
-            (r'(0[xX][0-9A-Fa-f]+(?:_[0-9A-Fa-f]+)*)(\s*)([/?])?',
-             bygroups(Number.Hex, Whitespace, Operator)),
-            (r'(0[bB][01]+(?:_[01]+)*)(\s*)([/?])?',
-             bygroups(Number.Bin, Whitespace, Operator)),
-            (r'([\d]+(?:_\d+)*)(\s*)([/?])?',
-             bygroups(Number.Integer, Whitespace, Operator)),
-            (r'\d+([eE][+-]?[0-9]+)|\d+\.\d+([eE][+-]?[0-9]+)?', Number.Float),
-            (r'\d+', Number.Integer)
-        ]
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/rust.py b/venv/lib/python3.11/site-packages/pygments/lexers/rust.py
deleted file mode 100644
index db68bb3..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/rust.py
+++ /dev/null
@@ -1,223 +0,0 @@
-"""
-    pygments.lexers.rust
-    ~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for the Rust language.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups, words, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation, Whitespace
-
-__all__ = ['RustLexer']
-
-
-class RustLexer(RegexLexer):
-    """
-    Lexer for the Rust programming language (version 1.47).
-
-    .. versionadded:: 1.6
-    """
-    name = 'Rust'
-    url = 'https://www.rust-lang.org/'
-    filenames = ['*.rs', '*.rs.in']
-    aliases = ['rust', 'rs']
-    mimetypes = ['text/rust', 'text/x-rust']
-
-    keyword_types = (words((
-        'u8', 'u16', 'u32', 'u64', 'u128', 'i8', 'i16', 'i32', 'i64', 'i128',
-        'usize', 'isize', 'f32', 'f64', 'char', 'str', 'bool',
-    ), suffix=r'\b'), Keyword.Type)
-
-    builtin_funcs_types = (words((
-        'Copy', 'Send', 'Sized', 'Sync', 'Unpin',
-        'Drop', 'Fn', 'FnMut', 'FnOnce', 'drop',
-        'Box', 'ToOwned', 'Clone',
-        'PartialEq', 'PartialOrd', 'Eq', 'Ord',
-        'AsRef', 'AsMut', 'Into', 'From', 'Default',
-        'Iterator', 'Extend', 'IntoIterator', 'DoubleEndedIterator',
-        'ExactSizeIterator',
-        'Option', 'Some', 'None',
-        'Result', 'Ok', 'Err',
-        'String', 'ToString', 'Vec',
-    ), suffix=r'\b'), Name.Builtin)
-
-    builtin_macros = (words((
-        'asm', 'assert', 'assert_eq', 'assert_ne', 'cfg', 'column',
-        'compile_error', 'concat', 'concat_idents', 'dbg', 'debug_assert',
-        'debug_assert_eq', 'debug_assert_ne', 'env', 'eprint', 'eprintln',
-        'file', 'format', 'format_args', 'format_args_nl', 'global_asm',
-        'include', 'include_bytes', 'include_str',
-        'is_aarch64_feature_detected',
-        'is_arm_feature_detected',
-        'is_mips64_feature_detected',
-        'is_mips_feature_detected',
-        'is_powerpc64_feature_detected',
-        'is_powerpc_feature_detected',
-        'is_x86_feature_detected',
-        'line', 'llvm_asm', 'log_syntax', 'macro_rules', 'matches',
-        'module_path', 'option_env', 'panic', 'print', 'println', 'stringify',
-        'thread_local', 'todo', 'trace_macros', 'unimplemented', 'unreachable',
-        'vec', 'write', 'writeln',
-    ), suffix=r'!'), Name.Function.Magic)
-
-    tokens = {
-        'root': [
-            # rust allows a file to start with a shebang, but if the first line
-            # starts with #![ then it's not a shebang but a crate attribute.
-            (r'#![^[\r\n].*$', Comment.Preproc),
-            default('base'),
-        ],
-        'base': [
-            # Whitespace and Comments
-            (r'\n', Whitespace),
-            (r'\s+', Whitespace),
-            (r'//!.*?\n', String.Doc),
-            (r'///(\n|[^/].*?\n)', String.Doc),
-            (r'//(.*?)\n', Comment.Single),
-            (r'/\*\*(\n|[^/*])', String.Doc, 'doccomment'),
-            (r'/\*!', String.Doc, 'doccomment'),
-            (r'/\*', Comment.Multiline, 'comment'),
-
-            # Macro parameters
-            (r"""\$([a-zA-Z_]\w*|\(,?|\),?|,?)""", Comment.Preproc),
-            # Keywords
-            (words(('as', 'async', 'await', 'box', 'const', 'crate', 'dyn',
-                    'else', 'extern', 'for', 'if', 'impl', 'in', 'loop',
-                    'match', 'move', 'mut', 'pub', 'ref', 'return', 'static',
-                    'super', 'trait', 'unsafe', 'use', 'where', 'while'),
-                   suffix=r'\b'), Keyword),
-            (words(('abstract', 'become', 'do', 'final', 'macro', 'override',
-                    'priv', 'typeof', 'try', 'unsized', 'virtual', 'yield'),
-                   suffix=r'\b'), Keyword.Reserved),
-            (r'(true|false)\b', Keyword.Constant),
-            (r'self\b', Name.Builtin.Pseudo),
-            (r'mod\b', Keyword, 'modname'),
-            (r'let\b', Keyword.Declaration),
-            (r'fn\b', Keyword, 'funcname'),
-            (r'(struct|enum|type|union)\b', Keyword, 'typename'),
-            (r'(default)(\s+)(type|fn)\b', bygroups(Keyword, Text, Keyword)),
-            keyword_types,
-            (r'[sS]elf\b', Name.Builtin.Pseudo),
-            # Prelude (taken from Rust's src/libstd/prelude.rs)
-            builtin_funcs_types,
-            builtin_macros,
-            # Path separators, so types don't catch them.
-            (r'::\b', Text),
-            # Types in positions.
-            (r'(?::|->)', Text, 'typename'),
-            # Labels
-            (r'(break|continue)(\b\s*)(\'[A-Za-z_]\w*)?',
-             bygroups(Keyword, Text.Whitespace, Name.Label)),
-
-            # Character literals
-            (r"""'(\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
-             r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
-             String.Char),
-            (r"""b'(\\['"\\nrt]|\\x[0-9a-fA-F]{2}|\\0"""
-             r"""|\\u\{[0-9a-fA-F]{1,6}\}|.)'""",
-             String.Char),
-
-            # Binary literals
-            (r'0b[01_]+', Number.Bin, 'number_lit'),
-            # Octal literals
-            (r'0o[0-7_]+', Number.Oct, 'number_lit'),
-            # Hexadecimal literals
-            (r'0[xX][0-9a-fA-F_]+', Number.Hex, 'number_lit'),
-            # Decimal literals
-            (r'[0-9][0-9_]*(\.[0-9_]+[eE][+\-]?[0-9_]+|'
-             r'\.[0-9_]*(?!\.)|[eE][+\-]?[0-9_]+)', Number.Float,
-             'number_lit'),
-            (r'[0-9][0-9_]*', Number.Integer, 'number_lit'),
-
-            # String literals
-            (r'b"', String, 'bytestring'),
-            (r'"', String, 'string'),
-            (r'(?s)b?r(#*)".*?"\1', String),
-
-            # Lifetime names
-            (r"'", Operator, 'lifetime'),
-
-            # Operators and Punctuation
-            (r'\.\.=?', Operator),
-            (r'[{}()\[\],.;]', Punctuation),
-            (r'[+\-*/%&|<>^!~@=:?]', Operator),
-
-            # Identifiers
-            (r'[a-zA-Z_]\w*', Name),
-            # Raw identifiers
-            (r'r#[a-zA-Z_]\w*', Name),
-
-            # Attributes
-            (r'#!?\[', Comment.Preproc, 'attribute['),
-
-            # Misc
-            # Lone hashes: not used in Rust syntax, but allowed in macro
-            # arguments, most famously for quote::quote!()
-            (r'#', Text),
-        ],
-        'comment': [
-            (r'[^*/]+', Comment.Multiline),
-            (r'/\*', Comment.Multiline, '#push'),
-            (r'\*/', Comment.Multiline, '#pop'),
-            (r'[*/]', Comment.Multiline),
-        ],
-        'doccomment': [
-            (r'[^*/]+', String.Doc),
-            (r'/\*', String.Doc, '#push'),
-            (r'\*/', String.Doc, '#pop'),
-            (r'[*/]', String.Doc),
-        ],
-        'modname': [
-            (r'\s+', Text),
-            (r'[a-zA-Z_]\w*', Name.Namespace, '#pop'),
-            default('#pop'),
-        ],
-        'funcname': [
-            (r'\s+', Text),
-            (r'[a-zA-Z_]\w*', Name.Function, '#pop'),
-            default('#pop'),
-        ],
-        'typename': [
-            (r'\s+', Text),
-            (r'&', Keyword.Pseudo),
-            (r"'", Operator, 'lifetime'),
-            builtin_funcs_types,
-            keyword_types,
-            (r'[a-zA-Z_]\w*', Name.Class, '#pop'),
-            default('#pop'),
-        ],
-        'lifetime': [
-            (r"(static|_)", Name.Builtin),
-            (r"[a-zA-Z_]+\w*", Name.Attribute),
-            default('#pop'),
-        ],
-        'number_lit': [
-            (r'[ui](8|16|32|64|size)', Keyword, '#pop'),
-            (r'f(32|64)', Keyword, '#pop'),
-            default('#pop'),
-        ],
-        'string': [
-            (r'"', String, '#pop'),
-            (r"""\\['"\\nrt]|\\x[0-7][0-9a-fA-F]|\\0"""
-             r"""|\\u\{[0-9a-fA-F]{1,6}\}""", String.Escape),
-            (r'[^\\"]+', String),
-            (r'\\', String),
-        ],
-        'bytestring': [
-            (r"""\\x[89a-fA-F][0-9a-fA-F]""", String.Escape),
-            include('string'),
-        ],
-        'attribute_common': [
-            (r'"', String, 'string'),
-            (r'\[', Comment.Preproc, 'attribute['),
-        ],
-        'attribute[': [
-            include('attribute_common'),
-            (r'\]', Comment.Preproc, '#pop'),
-            (r'[^"\]\[]+', Comment.Preproc),
-        ],
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/sas.py b/venv/lib/python3.11/site-packages/pygments/lexers/sas.py
deleted file mode 100644
index c34066b..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/sas.py
+++ /dev/null
@@ -1,227 +0,0 @@
-"""
-    pygments.lexers.sas
-    ~~~~~~~~~~~~~~~~~~~
-
-    Lexer for SAS.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-from pygments.lexer import RegexLexer, include, words
-from pygments.token import Comment, Keyword, Name, Number, String, Text, \
-    Other, Generic
-
-__all__ = ['SASLexer']
-
-
-class SASLexer(RegexLexer):
-    """
-    For SAS files.
-
-    .. versionadded:: 2.2
-    """
-    # Syntax from syntax/sas.vim by James Kidd 
-
-    name      = 'SAS'
-    aliases   = ['sas']
-    filenames = ['*.SAS', '*.sas']
-    mimetypes = ['text/x-sas', 'text/sas', 'application/x-sas']
-    flags     = re.IGNORECASE | re.MULTILINE
-
-    builtins_macros = (
-        "bquote", "nrbquote", "cmpres", "qcmpres", "compstor", "datatyp",
-        "display", "do", "else", "end", "eval", "global", "goto", "if",
-        "index", "input", "keydef", "label", "left", "length", "let",
-        "local", "lowcase", "macro", "mend", "nrquote",
-        "nrstr", "put", "qleft", "qlowcase", "qscan",
-        "qsubstr", "qsysfunc", "qtrim", "quote", "qupcase", "scan",
-        "str", "substr", "superq", "syscall", "sysevalf", "sysexec",
-        "sysfunc", "sysget", "syslput", "sysprod", "sysrc", "sysrput",
-        "then", "to", "trim", "unquote", "until", "upcase", "verify",
-        "while", "window"
-    )
-
-    builtins_conditionals = (
-        "do", "if", "then", "else", "end", "until", "while"
-    )
-
-    builtins_statements = (
-        "abort", "array", "attrib", "by", "call", "cards", "cards4",
-        "catname", "continue", "datalines", "datalines4", "delete", "delim",
-        "delimiter", "display", "dm", "drop", "endsas", "error", "file",
-        "filename", "footnote", "format", "goto", "in", "infile", "informat",
-        "input", "keep", "label", "leave", "length", "libname", "link",
-        "list", "lostcard", "merge", "missing", "modify", "options", "output",
-        "out", "page", "put", "redirect", "remove", "rename", "replace",
-        "retain", "return", "select", "set", "skip", "startsas", "stop",
-        "title", "update", "waitsas", "where", "window", "x", "systask"
-    )
-
-    builtins_sql = (
-        "add", "and", "alter", "as", "cascade", "check", "create",
-        "delete", "describe", "distinct", "drop", "foreign", "from",
-        "group", "having", "index", "insert", "into", "in", "key", "like",
-        "message", "modify", "msgtype", "not", "null", "on", "or",
-        "order", "primary", "references", "reset", "restrict", "select",
-        "set", "table", "unique", "update", "validate", "view", "where"
-    )
-
-    builtins_functions = (
-        "abs", "addr", "airy", "arcos", "arsin", "atan", "attrc",
-        "attrn", "band", "betainv", "blshift", "bnot", "bor",
-        "brshift", "bxor", "byte", "cdf", "ceil", "cexist", "cinv",
-        "close", "cnonct", "collate", "compbl", "compound",
-        "compress", "cos", "cosh", "css", "curobs", "cv", "daccdb",
-        "daccdbsl", "daccsl", "daccsyd", "dacctab", "dairy", "date",
-        "datejul", "datepart", "datetime", "day", "dclose", "depdb",
-        "depdbsl", "depsl", "depsyd",
-        "deptab", "dequote", "dhms", "dif", "digamma",
-        "dim", "dinfo", "dnum", "dopen", "doptname", "doptnum",
-        "dread", "dropnote", "dsname", "erf", "erfc", "exist", "exp",
-        "fappend", "fclose", "fcol", "fdelete", "fetch", "fetchobs",
-        "fexist", "fget", "fileexist", "filename", "fileref",
-        "finfo", "finv", "fipname", "fipnamel", "fipstate", "floor",
-        "fnonct", "fnote", "fopen", "foptname", "foptnum", "fpoint",
-        "fpos", "fput", "fread", "frewind", "frlen", "fsep", "fuzz",
-        "fwrite", "gaminv", "gamma", "getoption", "getvarc", "getvarn",
-        "hbound", "hms", "hosthelp", "hour", "ibessel", "index",
-        "indexc", "indexw", "input", "inputc", "inputn", "int",
-        "intck", "intnx", "intrr", "irr", "jbessel", "juldate",
-        "kurtosis", "lag", "lbound", "left", "length", "lgamma",
-        "libname", "libref", "log", "log10", "log2", "logpdf", "logpmf",
-        "logsdf", "lowcase", "max", "mdy", "mean", "min", "minute",
-        "mod", "month", "mopen", "mort", "n", "netpv", "nmiss",
-        "normal", "note", "npv", "open", "ordinal", "pathname",
-        "pdf", "peek", "peekc", "pmf", "point", "poisson", "poke",
-        "probbeta", "probbnml", "probchi", "probf", "probgam",
-        "probhypr", "probit", "probnegb", "probnorm", "probt",
-        "put", "putc", "putn", "qtr", "quote", "ranbin", "rancau",
-        "ranexp", "rangam", "range", "rank", "rannor", "ranpoi",
-        "rantbl", "rantri", "ranuni", "repeat", "resolve", "reverse",
-        "rewind", "right", "round", "saving", "scan", "sdf", "second",
-        "sign", "sin", "sinh", "skewness", "soundex", "spedis",
-        "sqrt", "std", "stderr", "stfips", "stname", "stnamel",
-        "substr", "sum", "symget", "sysget", "sysmsg", "sysprod",
-        "sysrc", "system", "tan", "tanh", "time", "timepart", "tinv",
-        "tnonct", "today", "translate", "tranwrd", "trigamma",
-        "trim", "trimn", "trunc", "uniform", "upcase", "uss", "var",
-        "varfmt", "varinfmt", "varlabel", "varlen", "varname",
-        "varnum", "varray", "varrayx", "vartype", "verify", "vformat",
-        "vformatd", "vformatdx", "vformatn", "vformatnx", "vformatw",
-        "vformatwx", "vformatx", "vinarray", "vinarrayx", "vinformat",
-        "vinformatd", "vinformatdx", "vinformatn", "vinformatnx",
-        "vinformatw", "vinformatwx", "vinformatx", "vlabel",
-        "vlabelx", "vlength", "vlengthx", "vname", "vnamex", "vtype",
-        "vtypex", "weekday", "year", "yyq", "zipfips", "zipname",
-        "zipnamel", "zipstate"
-    )
-
-    tokens = {
-        'root': [
-            include('comments'),
-            include('proc-data'),
-            include('cards-datalines'),
-            include('logs'),
-            include('general'),
-            (r'.', Text),
-        ],
-        # SAS is multi-line regardless, but * is ended by ;
-        'comments': [
-            (r'^\s*\*.*?;', Comment),
-            (r'/\*.*?\*/', Comment),
-            (r'^\s*\*(.|\n)*?;', Comment.Multiline),
-            (r'/[*](.|\n)*?[*]/', Comment.Multiline),
-        ],
-        # Special highlight for proc, data, quit, run
-        'proc-data': [
-            (r'(^|;)\s*(proc \w+|data|run|quit)[\s;]',
-             Keyword.Reserved),
-        ],
-        # Special highlight cards and datalines
-        'cards-datalines': [
-            (r'^\s*(datalines|cards)\s*;\s*$', Keyword, 'data'),
-        ],
-        'data': [
-            (r'(.|\n)*^\s*;\s*$', Other, '#pop'),
-        ],
-        # Special highlight for put NOTE|ERROR|WARNING (order matters)
-        'logs': [
-            (r'\n?^\s*%?put ', Keyword, 'log-messages'),
-        ],
-        'log-messages': [
-            (r'NOTE(:|-).*', Generic, '#pop'),
-            (r'WARNING(:|-).*', Generic.Emph, '#pop'),
-            (r'ERROR(:|-).*', Generic.Error, '#pop'),
-            include('general'),
-        ],
-        'general': [
-            include('keywords'),
-            include('vars-strings'),
-            include('special'),
-            include('numbers'),
-        ],
-        # Keywords, statements, functions, macros
-        'keywords': [
-            (words(builtins_statements,
-                   prefix = r'\b',
-                   suffix = r'\b'),
-             Keyword),
-            (words(builtins_sql,
-                   prefix = r'\b',
-                   suffix = r'\b'),
-             Keyword),
-            (words(builtins_conditionals,
-                   prefix = r'\b',
-                   suffix = r'\b'),
-             Keyword),
-            (words(builtins_macros,
-                   prefix = r'%',
-                   suffix = r'\b'),
-             Name.Builtin),
-            (words(builtins_functions,
-                   prefix = r'\b',
-                   suffix = r'\('),
-             Name.Builtin),
-        ],
-        # Strings and user-defined variables and macros (order matters)
-        'vars-strings': [
-            (r'&[a-z_]\w{0,31}\.?', Name.Variable),
-            (r'%[a-z_]\w{0,31}', Name.Function),
-            (r'\'', String, 'string_squote'),
-            (r'"', String, 'string_dquote'),
-        ],
-        'string_squote': [
-            ('\'', String, '#pop'),
-            (r'\\\\|\\"|\\\n', String.Escape),
-            # AFAIK, macro variables are not evaluated in single quotes
-            # (r'&', Name.Variable, 'validvar'),
-            (r'[^$\'\\]+', String),
-            (r'[$\'\\]', String),
-        ],
-        'string_dquote': [
-            (r'"', String, '#pop'),
-            (r'\\\\|\\"|\\\n', String.Escape),
-            (r'&', Name.Variable, 'validvar'),
-            (r'[^$&"\\]+', String),
-            (r'[$"\\]', String),
-        ],
-        'validvar': [
-            (r'[a-z_]\w{0,31}\.?', Name.Variable, '#pop'),
-        ],
-        # SAS numbers and special variables
-        'numbers': [
-            (r'\b[+-]?([0-9]+(\.[0-9]+)?|\.[0-9]+|\.)(E[+-]?[0-9]+)?i?\b',
-             Number),
-        ],
-        'special': [
-            (r'(null|missing|_all_|_automatic_|_character_|_n_|'
-             r'_infile_|_name_|_null_|_numeric_|_user_|_webout_)',
-             Keyword.Constant),
-        ],
-        # 'operators': [
-        #     (r'(-|=|<=|>=|<|>|<>|&|!=|'
-        #      r'\||\*|\+|\^|/|!|~|~=)', Operator)
-        # ],
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/savi.py b/venv/lib/python3.11/site-packages/pygments/lexers/savi.py
deleted file mode 100644
index 48927f3..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/savi.py
+++ /dev/null
@@ -1,170 +0,0 @@
-"""
-    pygments.lexers.savi
-    ~~~~~~~~~~~~~~~~~~~~
-
-    Lexer for Savi.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, include
-from pygments.token import Whitespace, Keyword, Name, String, Number, \
-  Operator, Punctuation, Comment, Generic, Error
-
-__all__ = ['SaviLexer']
-
-
-# The canonical version of this file can be found in the following repository,
-# where it is kept in sync with any language changes, as well as the other
-# pygments-like lexers that are maintained for use with other tools:
-# - https://github.com/savi-lang/savi/blob/main/tooling/pygments/lexers/savi.py
-#
-# If you're changing this file in the pygments repository, please ensure that
-# any changes you make are also propagated to the official Savi repository,
-# in order to avoid accidental clobbering of your changes later when an update
-# from the Savi repository flows forward into the pygments repository.
-#
-# If you're changing this file in the Savi repository, please ensure that
-# any changes you make are also reflected in the other pygments-like lexers
-# (rouge, vscode, etc) so that all of the lexers can be kept cleanly in sync.
-
-class SaviLexer(RegexLexer):
-  """
-  For Savi source code.
-
-  .. versionadded: 2.10
-  """
-
-  name = 'Savi'
-  url = 'https://github.com/savi-lang/savi'
-  aliases = ['savi']
-  filenames = ['*.savi']
-
-  tokens = {
-    "root": [
-      # Line Comment
-      (r'//.*?$', Comment.Single),
-
-      # Doc Comment
-      (r'::.*?$', Comment.Single),
-
-      # Capability Operator
-      (r'(\')(\w+)(?=[^\'])', bygroups(Operator, Name)),
-
-      # Double-Quote String
-      (r'\w?"', String.Double, "string.double"),
-
-      # Single-Char String
-      (r"'", String.Char, "string.char"),
-
-      # Type Name
-      (r'(_?[A-Z]\w*)', Name.Class),
-
-      # Nested Type Name
-      (r'(\.)(\s*)(_?[A-Z]\w*)', bygroups(Punctuation, Whitespace, Name.Class)),
-
-      # Declare
-      (r'^([ \t]*)(:\w+)',
-        bygroups(Whitespace, Name.Tag),
-        "decl"),
-
-      # Error-Raising Calls/Names
-      (r'((\w+|\+|\-|\*)\!)', Generic.Deleted),
-
-      # Numeric Values
-      (r'\b\d([\d_]*(\.[\d_]+)?)\b', Number),
-
-      # Hex Numeric Values
-      (r'\b0x([0-9a-fA-F_]+)\b', Number.Hex),
-
-      # Binary Numeric Values
-      (r'\b0b([01_]+)\b', Number.Bin),
-
-      # Function Call (with braces)
-      (r'\w+(?=\()', Name.Function),
-
-      # Function Call (with receiver)
-      (r'(\.)(\s*)(\w+)', bygroups(Punctuation, Whitespace, Name.Function)),
-
-      # Function Call (with self receiver)
-      (r'(@)(\w+)', bygroups(Punctuation, Name.Function)),
-
-      # Parenthesis
-      (r'\(', Punctuation, "root"),
-      (r'\)', Punctuation, "#pop"),
-
-      # Brace
-      (r'\{', Punctuation, "root"),
-      (r'\}', Punctuation, "#pop"),
-
-      # Bracket
-      (r'\[', Punctuation, "root"),
-      (r'(\])(\!)', bygroups(Punctuation, Generic.Deleted), "#pop"),
-      (r'\]', Punctuation, "#pop"),
-
-      # Punctuation
-      (r'[,;:\.@]', Punctuation),
-
-      # Piping Operators
-      (r'(\|\>)', Operator),
-
-      # Branching Operators
-      (r'(\&\&|\|\||\?\?|\&\?|\|\?|\.\?)', Operator),
-
-      # Comparison Operators
-      (r'(\<\=\>|\=\~|\=\=|\<\=|\>\=|\<|\>)', Operator),
-
-      # Arithmetic Operators
-      (r'(\+|\-|\/|\*|\%)', Operator),
-
-      # Assignment Operators
-      (r'(\=)', Operator),
-
-      # Other Operators
-      (r'(\!|\<\<|\<|\&|\|)', Operator),
-
-      # Identifiers
-      (r'\b\w+\b', Name),
-
-      # Whitespace
-      (r'[ \t\r]+\n*|\n+', Whitespace),
-    ],
-
-    # Declare (nested rules)
-    "decl": [
-      (r'\b[a-z_]\w*\b(?!\!)', Keyword.Declaration),
-      (r':', Punctuation, "#pop"),
-      (r'\n', Whitespace, "#pop"),
-      include("root"),
-    ],
-
-    # Double-Quote String (nested rules)
-    "string.double": [
-      (r'\\\(', String.Interpol, "string.interpolation"),
-      (r'\\u[0-9a-fA-F]{4}', String.Escape),
-      (r'\\x[0-9a-fA-F]{2}', String.Escape),
-      (r'\\[bfnrt\\\']', String.Escape),
-      (r'\\"', String.Escape),
-      (r'"', String.Double, "#pop"),
-      (r'[^\\"]+', String.Double),
-      (r'.', Error),
-    ],
-
-    # Single-Char String (nested rules)
-    "string.char": [
-      (r'\\u[0-9a-fA-F]{4}', String.Escape),
-      (r'\\x[0-9a-fA-F]{2}', String.Escape),
-      (r'\\[bfnrt\\\']', String.Escape),
-      (r"\\'", String.Escape),
-      (r"'", String.Char, "#pop"),
-      (r"[^\\']+", String.Char),
-      (r'.', Error),
-    ],
-
-    # Interpolation inside String (nested rules)
-    "string.interpolation": [
-      (r"\)", String.Interpol, "#pop"),
-      include("root"),
-    ]
-  }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/scdoc.py b/venv/lib/python3.11/site-packages/pygments/lexers/scdoc.py
deleted file mode 100644
index 90478ac..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/scdoc.py
+++ /dev/null
@@ -1,86 +0,0 @@
-"""
-    pygments.lexers.scdoc
-    ~~~~~~~~~~~~~~~~~~~~~
-
-    Lexer for scdoc, a simple man page generator.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, using, this
-from pygments.token import Text, Comment, Keyword, String, Generic
-
-__all__ = ['ScdocLexer']
-
-
-class ScdocLexer(RegexLexer):
-    """
-    `scdoc` is a simple man page generator for POSIX systems written in C99.
-
-    .. versionadded:: 2.5
-    """
-    name = 'scdoc'
-    url = 'https://git.sr.ht/~sircmpwn/scdoc'
-    aliases = ['scdoc', 'scd']
-    filenames = ['*.scd', '*.scdoc']
-    flags = re.MULTILINE
-
-    tokens = {
-        'root': [
-            # comment
-            (r'^(;.+\n)', bygroups(Comment)),
-
-            # heading with pound prefix
-            (r'^(#)([^#].+\n)', bygroups(Generic.Heading, Text)),
-            (r'^(#{2})(.+\n)', bygroups(Generic.Subheading, Text)),
-            # bulleted lists
-            (r'^(\s*)([*-])(\s)(.+\n)',
-            bygroups(Text, Keyword, Text, using(this, state='inline'))),
-            # numbered lists
-            (r'^(\s*)(\.+\.)( .+\n)',
-            bygroups(Text, Keyword, using(this, state='inline'))),
-            # quote
-            (r'^(\s*>\s)(.+\n)', bygroups(Keyword, Generic.Emph)),
-            # text block
-            (r'^(```\n)([\w\W]*?)(^```$)', bygroups(String, Text, String)),
-
-            include('inline'),
-        ],
-        'inline': [
-            # escape
-            (r'\\.', Text),
-            # underlines
-            (r'(\s)(_[^_]+_)(\W|\n)', bygroups(Text, Generic.Emph, Text)),
-            # bold
-            (r'(\s)(\*[^*]+\*)(\W|\n)', bygroups(Text, Generic.Strong, Text)),
-            # inline code
-            (r'`[^`]+`', String.Backtick),
-
-            # general text, must come last!
-            (r'[^\\\s]+', Text),
-            (r'.', Text),
-        ],
-    }
-
-    def analyse_text(text):
-        """We checks for bold and underline text with * and _. Also
-        every scdoc file must start with a strictly defined first line."""
-        result = 0
-
-        if '*' in text:
-            result += 0.01
-
-        if '_' in text:
-            result += 0.01
-
-        # name(section) ["left_footer" ["center_header"]]
-        first_line = text.partition('\n')[0]
-        scdoc_preamble_pattern = r'^.*\([1-7]\)( "[^"]+"){0,2}$'
-
-        if re.search(scdoc_preamble_pattern, first_line):
-            result += 0.5
-
-        return result
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/scripting.py b/venv/lib/python3.11/site-packages/pygments/lexers/scripting.py
deleted file mode 100644
index eab7ec9..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/scripting.py
+++ /dev/null
@@ -1,1286 +0,0 @@
-"""
-    pygments.lexers.scripting
-    ~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexer for scripting and embedded languages.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import RegexLexer, include, bygroups, default, combined, \
-    words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation, Error, Whitespace, Other
-from pygments.util import get_bool_opt, get_list_opt
-
-__all__ = ['LuaLexer', 'MoonScriptLexer', 'ChaiscriptLexer', 'LSLLexer',
-           'AppleScriptLexer', 'RexxLexer', 'MOOCodeLexer', 'HybrisLexer',
-           'EasytrieveLexer', 'JclLexer', 'MiniScriptLexer']
-
-
-class LuaLexer(RegexLexer):
-    """
-    For Lua source code.
-
-    Additional options accepted:
-
-    `func_name_highlighting`
-        If given and ``True``, highlight builtin function names
-        (default: ``True``).
-    `disabled_modules`
-        If given, must be a list of module names whose function names
-        should not be highlighted. By default all modules are highlighted.
-
-        To get a list of allowed modules have a look into the
-        `_lua_builtins` module:
-
-        .. sourcecode:: pycon
-
-            >>> from pygments.lexers._lua_builtins import MODULES
-            >>> MODULES.keys()
-            ['string', 'coroutine', 'modules', 'io', 'basic', ...]
-    """
-
-    name = 'Lua'
-    url = 'https://www.lua.org/'
-    aliases = ['lua']
-    filenames = ['*.lua', '*.wlua']
-    mimetypes = ['text/x-lua', 'application/x-lua']
-
-    _comment_multiline = r'(?:--\[(?P=*)\[[\w\W]*?\](?P=level)\])'
-    _comment_single = r'(?:--.*$)'
-    _space = r'(?:\s+)'
-    _s = r'(?:%s|%s|%s)' % (_comment_multiline, _comment_single, _space)
-    _name = r'(?:[^\W\d]\w*)'
-
-    tokens = {
-        'root': [
-            # Lua allows a file to start with a shebang.
-            (r'#!.*', Comment.Preproc),
-            default('base'),
-        ],
-        'ws': [
-            (_comment_multiline, Comment.Multiline),
-            (_comment_single, Comment.Single),
-            (_space, Text),
-        ],
-        'base': [
-            include('ws'),
-
-            (r'(?i)0x[\da-f]*(\.[\da-f]*)?(p[+-]?\d+)?', Number.Hex),
-            (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
-            (r'(?i)\d+e[+-]?\d+', Number.Float),
-            (r'\d+', Number.Integer),
-
-            # multiline strings
-            (r'(?s)\[(=*)\[.*?\]\1\]', String),
-
-            (r'::', Punctuation, 'label'),
-            (r'\.{3}', Punctuation),
-            (r'[=<>|~&+\-*/%#^]+|\.\.', Operator),
-            (r'[\[\]{}().,:;]', Punctuation),
-            (r'(and|or|not)\b', Operator.Word),
-
-            ('(break|do|else|elseif|end|for|if|in|repeat|return|then|until|'
-             r'while)\b', Keyword.Reserved),
-            (r'goto\b', Keyword.Reserved, 'goto'),
-            (r'(local)\b', Keyword.Declaration),
-            (r'(true|false|nil)\b', Keyword.Constant),
-
-            (r'(function)\b', Keyword.Reserved, 'funcname'),
-
-            (r'[A-Za-z_]\w*(\.[A-Za-z_]\w*)?', Name),
-
-            ("'", String.Single, combined('stringescape', 'sqs')),
-            ('"', String.Double, combined('stringescape', 'dqs'))
-        ],
-
-        'funcname': [
-            include('ws'),
-            (r'[.:]', Punctuation),
-            (r'%s(?=%s*[.:])' % (_name, _s), Name.Class),
-            (_name, Name.Function, '#pop'),
-            # inline function
-            (r'\(', Punctuation, '#pop'),
-        ],
-
-        'goto': [
-            include('ws'),
-            (_name, Name.Label, '#pop'),
-        ],
-
-        'label': [
-            include('ws'),
-            (r'::', Punctuation, '#pop'),
-            (_name, Name.Label),
-        ],
-
-        'stringescape': [
-            (r'\\([abfnrtv\\"\']|[\r\n]{1,2}|z\s*|x[0-9a-fA-F]{2}|\d{1,3}|'
-             r'u\{[0-9a-fA-F]+\})', String.Escape),
-        ],
-
-        'sqs': [
-            (r"'", String.Single, '#pop'),
-            (r"[^\\']+", String.Single),
-        ],
-
-        'dqs': [
-            (r'"', String.Double, '#pop'),
-            (r'[^\\"]+', String.Double),
-        ]
-    }
-
-    def __init__(self, **options):
-        self.func_name_highlighting = get_bool_opt(
-            options, 'func_name_highlighting', True)
-        self.disabled_modules = get_list_opt(options, 'disabled_modules', [])
-
-        self._functions = set()
-        if self.func_name_highlighting:
-            from pygments.lexers._lua_builtins import MODULES
-            for mod, func in MODULES.items():
-                if mod not in self.disabled_modules:
-                    self._functions.update(func)
-        RegexLexer.__init__(self, **options)
-
-    def get_tokens_unprocessed(self, text):
-        for index, token, value in \
-                RegexLexer.get_tokens_unprocessed(self, text):
-            if token is Name:
-                if value in self._functions:
-                    yield index, Name.Builtin, value
-                    continue
-                elif '.' in value:
-                    a, b = value.split('.')
-                    yield index, Name, a
-                    yield index + len(a), Punctuation, '.'
-                    yield index + len(a) + 1, Name, b
-                    continue
-            yield index, token, value
-
-class MoonScriptLexer(LuaLexer):
-    """
-    For MoonScript source code.
-
-    .. versionadded:: 1.5
-    """
-
-    name = 'MoonScript'
-    url = 'http://moonscript.org'
-    aliases = ['moonscript', 'moon']
-    filenames = ['*.moon']
-    mimetypes = ['text/x-moonscript', 'application/x-moonscript']
-
-    tokens = {
-        'root': [
-            (r'#!(.*?)$', Comment.Preproc),
-            default('base'),
-        ],
-        'base': [
-            ('--.*$', Comment.Single),
-            (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number.Float),
-            (r'(?i)\d+e[+-]?\d+', Number.Float),
-            (r'(?i)0x[0-9a-f]*', Number.Hex),
-            (r'\d+', Number.Integer),
-            (r'\n', Whitespace),
-            (r'[^\S\n]+', Text),
-            (r'(?s)\[(=*)\[.*?\]\1\]', String),
-            (r'(->|=>)', Name.Function),
-            (r':[a-zA-Z_]\w*', Name.Variable),
-            (r'(==|!=|~=|<=|>=|\.\.\.|\.\.|[=+\-*/%^<>#!.\\:])', Operator),
-            (r'[;,]', Punctuation),
-            (r'[\[\]{}()]', Keyword.Type),
-            (r'[a-zA-Z_]\w*:', Name.Variable),
-            (words((
-                'class', 'extends', 'if', 'then', 'super', 'do', 'with',
-                'import', 'export', 'while', 'elseif', 'return', 'for', 'in',
-                'from', 'when', 'using', 'else', 'and', 'or', 'not', 'switch',
-                'break'), suffix=r'\b'),
-             Keyword),
-            (r'(true|false|nil)\b', Keyword.Constant),
-            (r'(and|or|not)\b', Operator.Word),
-            (r'(self)\b', Name.Builtin.Pseudo),
-            (r'@@?([a-zA-Z_]\w*)?', Name.Variable.Class),
-            (r'[A-Z]\w*', Name.Class),  # proper name
-            (r'[A-Za-z_]\w*(\.[A-Za-z_]\w*)?', Name),
-            ("'", String.Single, combined('stringescape', 'sqs')),
-            ('"', String.Double, combined('stringescape', 'dqs'))
-        ],
-        'stringescape': [
-            (r'''\\([abfnrtv\\"']|\d{1,3})''', String.Escape)
-        ],
-        'sqs': [
-            ("'", String.Single, '#pop'),
-            ("[^']+", String)
-        ],
-        'dqs': [
-            ('"', String.Double, '#pop'),
-            ('[^"]+', String)
-        ]
-    }
-
-    def get_tokens_unprocessed(self, text):
-        # set . as Operator instead of Punctuation
-        for index, token, value in LuaLexer.get_tokens_unprocessed(self, text):
-            if token == Punctuation and value == ".":
-                token = Operator
-            yield index, token, value
-
-
-class ChaiscriptLexer(RegexLexer):
-    """
-    For ChaiScript source code.
-
-    .. versionadded:: 2.0
-    """
-
-    name = 'ChaiScript'
-    url = 'http://chaiscript.com/'
-    aliases = ['chaiscript', 'chai']
-    filenames = ['*.chai']
-    mimetypes = ['text/x-chaiscript', 'application/x-chaiscript']
-
-    flags = re.DOTALL | re.MULTILINE
-
-    tokens = {
-        'commentsandwhitespace': [
-            (r'\s+', Text),
-            (r'//.*?\n', Comment.Single),
-            (r'/\*.*?\*/', Comment.Multiline),
-            (r'^\#.*?\n', Comment.Single)
-        ],
-        'slashstartsregex': [
-            include('commentsandwhitespace'),
-            (r'/(\\.|[^[/\\\n]|\[(\\.|[^\]\\\n])*])+/'
-             r'([gim]+\b|\B)', String.Regex, '#pop'),
-            (r'(?=/)', Text, ('#pop', 'badregex')),
-            default('#pop')
-        ],
-        'badregex': [
-            (r'\n', Text, '#pop')
-        ],
-        'root': [
-            include('commentsandwhitespace'),
-            (r'\n', Text),
-            (r'[^\S\n]+', Text),
-            (r'\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|\.\.'
-             r'(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?', Operator, 'slashstartsregex'),
-            (r'[{(\[;,]', Punctuation, 'slashstartsregex'),
-            (r'[})\].]', Punctuation),
-            (r'[=+\-*/]', Operator),
-            (r'(for|in|while|do|break|return|continue|if|else|'
-             r'throw|try|catch'
-             r')\b', Keyword, 'slashstartsregex'),
-            (r'(var)\b', Keyword.Declaration, 'slashstartsregex'),
-            (r'(attr|def|fun)\b', Keyword.Reserved),
-            (r'(true|false)\b', Keyword.Constant),
-            (r'(eval|throw)\b', Name.Builtin),
-            (r'`\S+`', Name.Builtin),
-            (r'[$a-zA-Z_]\w*', Name.Other),
-            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
-            (r'0x[0-9a-fA-F]+', Number.Hex),
-            (r'[0-9]+', Number.Integer),
-            (r'"', String.Double, 'dqstring'),
-            (r"'(\\\\|\\[^\\]|[^'\\])*'", String.Single),
-        ],
-        'dqstring': [
-            (r'\$\{[^"}]+?\}', String.Interpol),
-            (r'\$', String.Double),
-            (r'\\\\', String.Double),
-            (r'\\"', String.Double),
-            (r'[^\\"$]+', String.Double),
-            (r'"', String.Double, '#pop'),
-        ],
-    }
-
-
-class LSLLexer(RegexLexer):
-    """
-    For Second Life's Linden Scripting Language source code.
-
-    .. versionadded:: 2.0
-    """
-
-    name = 'LSL'
-    aliases = ['lsl']
-    filenames = ['*.lsl']
-    mimetypes = ['text/x-lsl']
-
-    flags = re.MULTILINE
-
-    lsl_keywords = r'\b(?:do|else|for|if|jump|return|while)\b'
-    lsl_types = r'\b(?:float|integer|key|list|quaternion|rotation|string|vector)\b'
-    lsl_states = r'\b(?:(?:state)\s+\w+|default)\b'
-    lsl_events = r'\b(?:state_(?:entry|exit)|touch(?:_(?:start|end))?|(?:land_)?collision(?:_(?:start|end))?|timer|listen|(?:no_)?sensor|control|(?:not_)?at_(?:rot_)?target|money|email|run_time_permissions|changed|attach|dataserver|moving_(?:start|end)|link_message|(?:on|object)_rez|remote_data|http_re(?:sponse|quest)|path_update|transaction_result)\b'
-    lsl_functions_builtin = r'\b(?:ll(?:ReturnObjectsBy(?:ID|Owner)|Json(?:2List|[GS]etValue|ValueType)|Sin|Cos|Tan|Atan2|Sqrt|Pow|Abs|Fabs|Frand|Floor|Ceil|Round|Vec(?:Mag|Norm|Dist)|Rot(?:Between|2(?:Euler|Fwd|Left|Up))|(?:Euler|Axes)2Rot|Whisper|(?:Region|Owner)?Say|Shout|Listen(?:Control|Remove)?|Sensor(?:Repeat|Remove)?|Detected(?:Name|Key|Owner|Type|Pos|Vel|Grab|Rot|Group|LinkNumber)|Die|Ground|Wind|(?:[GS]et)(?:AnimationOverride|MemoryLimit|PrimMediaParams|ParcelMusicURL|Object(?:Desc|Name)|PhysicsMaterial|Status|Scale|Color|Alpha|Texture|Pos|Rot|Force|Torque)|ResetAnimationOverride|(?:Scale|Offset|Rotate)Texture|(?:Rot)?Target(?:Remove)?|(?:Stop)?MoveToTarget|Apply(?:Rotational)?Impulse|Set(?:KeyframedMotion|ContentType|RegionPos|(?:Angular)?Velocity|Buoyancy|HoverHeight|ForceAndTorque|TimerEvent|ScriptState|Damage|TextureAnim|Sound(?:Queueing|Radius)|Vehicle(?:Type|(?:Float|Vector|Rotation)Param)|(?:Touch|Sit)?Text|Camera(?:Eye|At)Offset|PrimitiveParams|ClickAction|Link(?:Alpha|Color|PrimitiveParams(?:Fast)?|Texture(?:Anim)?|Camera|Media)|RemoteScriptAccessPin|PayPrice|LocalRot)|ScaleByFactor|Get(?:(?:Max|Min)ScaleFactor|ClosestNavPoint|StaticPath|SimStats|Env|PrimitiveParams|Link(?:PrimitiveParams|Number(?:OfSides)?|Key|Name|Media)|HTTPHeader|FreeURLs|Object(?:Details|PermMask|PrimCount)|Parcel(?:MaxPrims|Details|Prim(?:Count|Owners))|Attached|(?:SPMax|Free|Used)Memory|Region(?:Name|TimeDilation|FPS|Corner|AgentCount)|Root(?:Position|Rotation)|UnixTime|(?:Parcel|Region)Flags|(?:Wall|GMT)clock|SimulatorHostname|BoundingBox|GeometricCenter|Creator|NumberOf(?:Prims|NotecardLines|Sides)|Animation(?:List)?|(?:Camera|Local)(?:Pos|Rot)|Vel|Accel|Omega|Time(?:stamp|OfDay)|(?:Object|CenterOf)?Mass|MassMKS|Energy|Owner|(?:Owner)?Key|SunDirection|Texture(?:Offset|Scale|Rot)|Inventory(?:Number|Name|Key|Type|Creator|PermMask)|Permissions(?:Key)?|StartParameter|List(?:Length|EntryType)|Date|Agent(?:Size|Info|Language|List)|LandOwnerAt|NotecardLine|Script(?:Name|State))
|(?:Get|Reset|GetAndReset)Time|PlaySound(?:Slave)?|LoopSound(?:Master|Slave)?|(?:Trigger|Stop|Preload)Sound|(?:(?:Get|Delete)Sub|Insert)String|To(?:Upper|Lower)|Give(?:InventoryList|Money)|RezObject|(?:Stop)?LookAt|Sleep|CollisionFilter|(?:Take|Release)Controls|DetachFromAvatar|AttachToAvatar(?:Temp)?|InstantMessage|(?:GetNext)?Email|StopHover|MinEventDelay|RotLookAt|String(?:Length|Trim)|(?:Start|Stop)Animation|TargetOmega|RequestPermissions|(?:Create|Break)Link|BreakAllLinks|(?:Give|Remove)Inventory|Water|PassTouches|Request(?:Agent|Inventory)Data|TeleportAgent(?:Home|GlobalCoords)?|ModifyLand|CollisionSound|ResetScript|MessageLinked|PushObject|PassCollisions|AxisAngle2Rot|Rot2(?:Axis|Angle)|A(?:cos|sin)|AngleBetween|AllowInventoryDrop|SubStringIndex|List2(?:CSV|Integer|Json|Float|String|Key|Vector|Rot|List(?:Strided)?)|DeleteSubList|List(?:Statistics|Sort|Randomize|(?:Insert|Find|Replace)List)|EdgeOfWorld|AdjustSoundVolume|Key2Name|TriggerSoundLimited|EjectFromLand|(?:CSV|ParseString)2List|OverMyLand|SameGroup|UnSit|Ground(?:Slope|Normal|Contour)|GroundRepel|(?:Set|Remove)VehicleFlags|(?:AvatarOn)?(?:Link)?SitTarget|Script(?:Danger|Profiler)|Dialog|VolumeDetect|ResetOtherScript|RemoteLoadScriptPin|(?:Open|Close)RemoteDataChannel|SendRemoteData|RemoteDataReply|(?:Integer|String)ToBase64|XorBase64|Log(?:10)?|Base64To(?:String|Integer)|ParseStringKeepNulls|RezAtRoot|RequestSimulatorData|ForceMouselook|(?:Load|Release|(?:E|Une)scape)URL|ParcelMedia(?:CommandList|Query)|ModPow|MapDestination|(?:RemoveFrom|AddTo|Reset)Land(?:Pass|Ban)List|(?:Set|Clear)CameraParams|HTTP(?:Request|Response)|TextBox|DetectedTouch(?:UV|Face|Pos|(?:N|Bin)ormal|ST)|(?:MD5|SHA1|DumpList2)String|Request(?:Secure)?URL|Clear(?:Prim|Link)Media|(?:Link)?ParticleSystem|(?:Get|Request)(?:Username|DisplayName)|RegionSayTo|CastRay|GenerateKey|TransferLindenDollars|ManageEstateAccess|(?:Create|Delete)Character|ExecCharacterCmd|Evade|FleeFrom|NavigateTo|PatrolPoints|Pursue|UpdateCharacter|WanderWithin))
\b'
-    lsl_constants_float = r'\b(?:DEG_TO_RAD|PI(?:_BY_TWO)?|RAD_TO_DEG|SQRT2|TWO_PI)\b'
-    lsl_constants_integer = r'\b(?:JSON_APPEND|STATUS_(?:PHYSICS|ROTATE_[XYZ]|PHANTOM|SANDBOX|BLOCK_GRAB(?:_OBJECT)?|(?:DIE|RETURN)_AT_EDGE|CAST_SHADOWS|OK|MALFORMED_PARAMS|TYPE_MISMATCH|BOUNDS_ERROR|NOT_(?:FOUND|SUPPORTED)|INTERNAL_ERROR|WHITELIST_FAILED)|AGENT(?:_(?:BY_(?:LEGACY_|USER)NAME|FLYING|ATTACHMENTS|SCRIPTED|MOUSELOOK|SITTING|ON_OBJECT|AWAY|WALKING|IN_AIR|TYPING|CROUCHING|BUSY|ALWAYS_RUN|AUTOPILOT|LIST_(?:PARCEL(?:_OWNER)?|REGION)))?|CAMERA_(?:PITCH|DISTANCE|BEHINDNESS_(?:ANGLE|LAG)|(?:FOCUS|POSITION)(?:_(?:THRESHOLD|LOCKED|LAG))?|FOCUS_OFFSET|ACTIVE)|ANIM_ON|LOOP|REVERSE|PING_PONG|SMOOTH|ROTATE|SCALE|ALL_SIDES|LINK_(?:ROOT|SET|ALL_(?:OTHERS|CHILDREN)|THIS)|ACTIVE|PASSIVE|SCRIPTED|CONTROL_(?:FWD|BACK|(?:ROT_)?(?:LEFT|RIGHT)|UP|DOWN|(?:ML_)?LBUTTON)|PERMISSION_(?:RETURN_OBJECTS|DEBIT|OVERRIDE_ANIMATIONS|SILENT_ESTATE_MANAGEMENT|TAKE_CONTROLS|TRIGGER_ANIMATION|ATTACH|CHANGE_LINKS|(?:CONTROL|TRACK)_CAMERA|TELEPORT)|INVENTORY_(?:TEXTURE|SOUND|OBJECT|SCRIPT|LANDMARK|CLOTHING|NOTECARD|BODYPART|ANIMATION|GESTURE|ALL|NONE)|CHANGED_(?:INVENTORY|COLOR|SHAPE|SCALE|TEXTURE|LINK|ALLOWED_DROP|OWNER|REGION(?:_START)?|TELEPORT|MEDIA)|OBJECT_(?:(?:PHYSICS|SERVER|STREAMING)_COST|UNKNOWN_DETAIL|CHARACTER_TIME|PHANTOM|PHYSICS|TEMP_ON_REZ|NAME|DESC|POS|PRIM_EQUIVALENCE|RETURN_(?:PARCEL(?:_OWNER)?|REGION)|ROO?T|VELOCITY|OWNER|GROUP|CREATOR|ATTACHED_POINT|RENDER_WEIGHT|PATHFINDING_TYPE|(?:RUNNING|TOTAL)_SCRIPT_COUNT|SCRIPT_(?:MEMORY|TIME))|TYPE_(?:INTEGER|FLOAT|STRING|KEY|VECTOR|ROTATION|INVALID)|(?:DEBUG|PUBLIC)_CHANNEL|ATTACH_(?:AVATAR_CENTER|CHEST|HEAD|BACK|PELVIS|MOUTH|CHIN|NECK|NOSE|BELLY|[LR](?:SHOULDER|HAND|FOOT|EAR|EYE|[UL](?:ARM|LEG)|HIP)|(?:LEFT|RIGHT)_PEC|HUD_(?:CENTER_[12]|TOP_(?:RIGHT|CENTER|LEFT)|BOTTOM(?:_(?:RIGHT|LEFT))?))|LAND_(?:LEVEL|RAISE|LOWER|SMOOTH|NOISE|REVERT)|DATA_(?:ONLINE|NAME|BORN|SIM_(?:POS|STATUS|RATING)|PAYINFO)|PAYMENT_INFO_(?:ON_FILE|USED)|REMOTE_DATA_(?:CHANNEL|REQUEST|REPLY)|PSYS_(?:PART_(?:BF_(?:ZERO|ONE(?:_MINUS_(?:DEST_COLOR|SOURCE_(ALPHA
|COLOR)))?|DEST_COLOR|SOURCE_(ALPHA|COLOR))|BLEND_FUNC_(DEST|SOURCE)|FLAGS|(?:START|END)_(?:COLOR|ALPHA|SCALE|GLOW)|MAX_AGE|(?:RIBBON|WIND|INTERP_(?:COLOR|SCALE)|BOUNCE|FOLLOW_(?:SRC|VELOCITY)|TARGET_(?:POS|LINEAR)|EMISSIVE)_MASK)|SRC_(?:MAX_AGE|PATTERN|ANGLE_(?:BEGIN|END)|BURST_(?:RATE|PART_COUNT|RADIUS|SPEED_(?:MIN|MAX))|ACCEL|TEXTURE|TARGET_KEY|OMEGA|PATTERN_(?:DROP|EXPLODE|ANGLE(?:_CONE(?:_EMPTY)?)?)))|VEHICLE_(?:REFERENCE_FRAME|TYPE_(?:NONE|SLED|CAR|BOAT|AIRPLANE|BALLOON)|(?:LINEAR|ANGULAR)_(?:FRICTION_TIMESCALE|MOTOR_DIRECTION)|LINEAR_MOTOR_OFFSET|HOVER_(?:HEIGHT|EFFICIENCY|TIMESCALE)|BUOYANCY|(?:LINEAR|ANGULAR)_(?:DEFLECTION_(?:EFFICIENCY|TIMESCALE)|MOTOR_(?:DECAY_)?TIMESCALE)|VERTICAL_ATTRACTION_(?:EFFICIENCY|TIMESCALE)|BANKING_(?:EFFICIENCY|MIX|TIMESCALE)|FLAG_(?:NO_DEFLECTION_UP|LIMIT_(?:ROLL_ONLY|MOTOR_UP)|HOVER_(?:(?:WATER|TERRAIN|UP)_ONLY|GLOBAL_HEIGHT)|MOUSELOOK_(?:STEER|BANK)|CAMERA_DECOUPLED))|PRIM_(?:TYPE(?:_(?:BOX|CYLINDER|PRISM|SPHERE|TORUS|TUBE|RING|SCULPT))?|HOLE_(?:DEFAULT|CIRCLE|SQUARE|TRIANGLE)|MATERIAL(?:_(?:STONE|METAL|GLASS|WOOD|FLESH|PLASTIC|RUBBER))?|SHINY_(?:NONE|LOW|MEDIUM|HIGH)|BUMP_(?:NONE|BRIGHT|DARK|WOOD|BARK|BRICKS|CHECKER|CONCRETE|TILE|STONE|DISKS|GRAVEL|BLOBS|SIDING|LARGETILE|STUCCO|SUCTION|WEAVE)|TEXGEN_(?:DEFAULT|PLANAR)|SCULPT_(?:TYPE_(?:SPHERE|TORUS|PLANE|CYLINDER|MASK)|FLAG_(?:MIRROR|INVERT))|PHYSICS(?:_(?:SHAPE_(?:CONVEX|NONE|PRIM|TYPE)))?|(?:POS|ROT)_LOCAL|SLICE|TEXT|FLEXIBLE|POINT_LIGHT|TEMP_ON_REZ|PHANTOM|POSITION|SIZE|ROTATION|TEXTURE|NAME|OMEGA|DESC|LINK_TARGET|COLOR|BUMP_SHINY|FULLBRIGHT|TEXGEN|GLOW|MEDIA_(?:ALT_IMAGE_ENABLE|CONTROLS|(?:CURRENT|HOME)_URL|AUTO_(?:LOOP|PLAY|SCALE|ZOOM)|FIRST_CLICK_INTERACT|(?:WIDTH|HEIGHT)_PIXELS|WHITELIST(?:_ENABLE)?|PERMS_(?:INTERACT|CONTROL)|PARAM_MAX|CONTROLS_(?:STANDARD|MINI)|PERM_(?:NONE|OWNER|GROUP|ANYONE)|MAX_(?:URL_LENGTH|WHITELIST_(?:SIZE|COUNT)|(?:WIDTH|HEIGHT)_PIXELS)))|MASK_(?:BASE|OWNER|GROUP|EVERYONE|NEXT)|PERM_(?:TRANSFER|MODIFY|COPY|MOVE|ALL)|PARCEL_(?:MEDIA_COMMAND_(?
:STOP|PAUSE|PLAY|LOOP|TEXTURE|URL|TIME|AGENT|UNLOAD|AUTO_ALIGN|TYPE|SIZE|DESC|LOOP_SET)|FLAG_(?:ALLOW_(?:FLY|(?:GROUP_)?SCRIPTS|LANDMARK|TERRAFORM|DAMAGE|CREATE_(?:GROUP_)?OBJECTS)|USE_(?:ACCESS_(?:GROUP|LIST)|BAN_LIST|LAND_PASS_LIST)|LOCAL_SOUND_ONLY|RESTRICT_PUSHOBJECT|ALLOW_(?:GROUP|ALL)_OBJECT_ENTRY)|COUNT_(?:TOTAL|OWNER|GROUP|OTHER|SELECTED|TEMP)|DETAILS_(?:NAME|DESC|OWNER|GROUP|AREA|ID|SEE_AVATARS))|LIST_STAT_(?:MAX|MIN|MEAN|MEDIAN|STD_DEV|SUM(?:_SQUARES)?|NUM_COUNT|GEOMETRIC_MEAN|RANGE)|PAY_(?:HIDE|DEFAULT)|REGION_FLAG_(?:ALLOW_DAMAGE|FIXED_SUN|BLOCK_TERRAFORM|SANDBOX|DISABLE_(?:COLLISIONS|PHYSICS)|BLOCK_FLY|ALLOW_DIRECT_TELEPORT|RESTRICT_PUSHOBJECT)|HTTP_(?:METHOD|MIMETYPE|BODY_(?:MAXLENGTH|TRUNCATED)|CUSTOM_HEADER|PRAGMA_NO_CACHE|VERBOSE_THROTTLE|VERIFY_CERT)|STRING_(?:TRIM(?:_(?:HEAD|TAIL))?)|CLICK_ACTION_(?:NONE|TOUCH|SIT|BUY|PAY|OPEN(?:_MEDIA)?|PLAY|ZOOM)|TOUCH_INVALID_FACE|PROFILE_(?:NONE|SCRIPT_MEMORY)|RC_(?:DATA_FLAGS|DETECT_PHANTOM|GET_(?:LINK_NUM|NORMAL|ROOT_KEY)|MAX_HITS|REJECT_(?:TYPES|AGENTS|(?:NON)?PHYSICAL|LAND))|RCERR_(?:CAST_TIME_EXCEEDED|SIM_PERF_LOW|UNKNOWN)|ESTATE_ACCESS_(?:ALLOWED_(?:AGENT|GROUP)_(?:ADD|REMOVE)|BANNED_AGENT_(?:ADD|REMOVE))|DENSITY|FRICTION|RESTITUTION|GRAVITY_MULTIPLIER|KFM_(?:COMMAND|CMD_(?:PLAY|STOP|PAUSE|SET_MODE)|MODE|FORWARD|LOOP|PING_PONG|REVERSE|DATA|ROTATION|TRANSLATION)|ERR_(?:GENERIC|PARCEL_PERMISSIONS|MALFORMED_PARAMS|RUNTIME_PERMISSIONS|THROTTLED)|CHARACTER_(?:CMD_(?:(?:SMOOTH_)?STOP|JUMP)|DESIRED_(?:TURN_)?SPEED|RADIUS|STAY_WITHIN_PARCEL|LENGTH|ORIENTATION|ACCOUNT_FOR_SKIPPED_FRAMES|AVOIDANCE_MODE|TYPE(?:_(?:[A-D]|NONE))?|MAX_(?:DECEL|TURN_RADIUS|(?:ACCEL|SPEED)))|PURSUIT_(?:OFFSET|FUZZ_FACTOR|GOAL_TOLERANCE|INTERCEPT)|REQUIRE_LINE_OF_SIGHT|FORCE_DIRECT_PATH|VERTICAL|HORIZONTAL|AVOID_(?:CHARACTERS|DYNAMIC_OBSTACLES|NONE)|PU_(?:EVADE_(?:HIDDEN|SPOTTED)|FAILURE_(?:DYNAMIC_PATHFINDING_DISABLED|INVALID_(?:GOAL|START)|NO_(?:NAVMESH|VALID_DESTINATION)|OTHER|TARGET_GONE|(?:PARCEL_)?UNREACHABLE)|(?:GOAL|SLOWDOWN_DIST
ANCE)_REACHED)|TRAVERSAL_TYPE(?:_(?:FAST|NONE|SLOW))?|CONTENT_TYPE_(?:ATOM|FORM|HTML|JSON|LLSD|RSS|TEXT|XHTML|XML)|GCNP_(?:RADIUS|STATIC)|(?:PATROL|WANDER)_PAUSE_AT_WAYPOINTS|OPT_(?:AVATAR|CHARACTER|EXCLUSION_VOLUME|LEGACY_LINKSET|MATERIAL_VOLUME|OTHER|STATIC_OBSTACLE|WALKABLE)|SIM_STAT_PCT_CHARS_STEPPED)\b'
-    lsl_constants_integer_boolean = r'\b(?:FALSE|TRUE)\b'
-    lsl_constants_rotation = r'\b(?:ZERO_ROTATION)\b'
-    lsl_constants_string = r'\b(?:EOF|JSON_(?:ARRAY|DELETE|FALSE|INVALID|NULL|NUMBER|OBJECT|STRING|TRUE)|NULL_KEY|TEXTURE_(?:BLANK|DEFAULT|MEDIA|PLYWOOD|TRANSPARENT)|URL_REQUEST_(?:GRANTED|DENIED))\b'
-    lsl_constants_vector = r'\b(?:TOUCH_INVALID_(?:TEXCOORD|VECTOR)|ZERO_VECTOR)\b'
-    lsl_invalid_broken = r'\b(?:LAND_(?:LARGE|MEDIUM|SMALL)_BRUSH)\b'
-    lsl_invalid_deprecated = r'\b(?:ATTACH_[LR]PEC|DATA_RATING|OBJECT_ATTACHMENT_(?:GEOMETRY_BYTES|SURFACE_AREA)|PRIM_(?:CAST_SHADOWS|MATERIAL_LIGHT|TYPE_LEGACY)|PSYS_SRC_(?:INNER|OUTER)ANGLE|VEHICLE_FLAG_NO_FLY_UP|ll(?:Cloud|Make(?:Explosion|Fountain|Smoke|Fire)|RemoteDataSetRegion|Sound(?:Preload)?|XorBase64Strings(?:Correct)?))\b'
-    lsl_invalid_illegal = r'\b(?:event)\b'
-    lsl_invalid_unimplemented = r'\b(?:CHARACTER_(?:MAX_ANGULAR_(?:ACCEL|SPEED)|TURN_SPEED_MULTIPLIER)|PERMISSION_(?:CHANGE_(?:JOINTS|PERMISSIONS)|RELEASE_OWNERSHIP|REMAP_CONTROLS)|PRIM_PHYSICS_MATERIAL|PSYS_SRC_OBJ_REL_MASK|ll(?:CollisionSprite|(?:Stop)?PointAt|(?:(?:Refresh|Set)Prim)URL|(?:Take|Release)Camera|RemoteLoadScript))\b'
-    lsl_reserved_godmode = r'\b(?:ll(?:GodLikeRezObject|Set(?:Inventory|Object)PermMask))\b'
-    lsl_reserved_log = r'\b(?:print)\b'
-    lsl_operators = r'\+\+|\-\-|<<|>>|&&?|\|\|?|\^|~|[!%<>=*+\-/]=?'
-
-    tokens = {
-        'root':
-        [
-            (r'//.*?\n',                          Comment.Single),
-            (r'/\*',                              Comment.Multiline, 'comment'),
-            (r'"',                                String.Double, 'string'),
-            (lsl_keywords,                        Keyword),
-            (lsl_types,                           Keyword.Type),
-            (lsl_states,                          Name.Class),
-            (lsl_events,                          Name.Builtin),
-            (lsl_functions_builtin,               Name.Function),
-            (lsl_constants_float,                 Keyword.Constant),
-            (lsl_constants_integer,               Keyword.Constant),
-            (lsl_constants_integer_boolean,       Keyword.Constant),
-            (lsl_constants_rotation,              Keyword.Constant),
-            (lsl_constants_string,                Keyword.Constant),
-            (lsl_constants_vector,                Keyword.Constant),
-            (lsl_invalid_broken,                  Error),
-            (lsl_invalid_deprecated,              Error),
-            (lsl_invalid_illegal,                 Error),
-            (lsl_invalid_unimplemented,           Error),
-            (lsl_reserved_godmode,                Keyword.Reserved),
-            (lsl_reserved_log,                    Keyword.Reserved),
-            (r'\b([a-zA-Z_]\w*)\b',     Name.Variable),
-            (r'(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d*', Number.Float),
-            (r'(\d+\.\d*|\.\d+)',                 Number.Float),
-            (r'0[xX][0-9a-fA-F]+',                Number.Hex),
-            (r'\d+',                              Number.Integer),
-            (lsl_operators,                       Operator),
-            (r':=?',                              Error),
-            (r'[,;{}()\[\]]',                     Punctuation),
-            (r'\n+',                              Whitespace),
-            (r'\s+',                              Whitespace)
-        ],
-        'comment':
-        [
-            (r'[^*/]+',                           Comment.Multiline),
-            (r'/\*',                              Comment.Multiline, '#push'),
-            (r'\*/',                              Comment.Multiline, '#pop'),
-            (r'[*/]',                             Comment.Multiline)
-        ],
-        'string':
-        [
-            (r'\\([nt"\\])',                      String.Escape),
-            (r'"',                                String.Double, '#pop'),
-            (r'\\.',                              Error),
-            (r'[^"\\]+',                          String.Double),
-        ]
-    }
-
-
-class AppleScriptLexer(RegexLexer):
-    """
-    For AppleScript source code,
-    including `AppleScript Studio
-    `_.
-    Contributed by Andreas Amann .
-
-    .. versionadded:: 1.0
-    """
-
-    name = 'AppleScript'
-    url = 'https://developer.apple.com/library/archive/documentation/AppleScript/Conceptual/AppleScriptLangGuide/introduction/ASLR_intro.html'
-    aliases = ['applescript']
-    filenames = ['*.applescript']
-
-    flags = re.MULTILINE | re.DOTALL
-
-    Identifiers = r'[a-zA-Z]\w*'
-
-    # XXX: use words() for all of these
-    Literals = ('AppleScript', 'current application', 'false', 'linefeed',
-                'missing value', 'pi', 'quote', 'result', 'return', 'space',
-                'tab', 'text item delimiters', 'true', 'version')
-    Classes = ('alias ', 'application ', 'boolean ', 'class ', 'constant ',
-               'date ', 'file ', 'integer ', 'list ', 'number ', 'POSIX file ',
-               'real ', 'record ', 'reference ', 'RGB color ', 'script ',
-               'text ', 'unit types', '(?:Unicode )?text', 'string')
-    BuiltIn = ('attachment', 'attribute run', 'character', 'day', 'month',
-               'paragraph', 'word', 'year')
-    HandlerParams = ('about', 'above', 'against', 'apart from', 'around',
-                     'aside from', 'at', 'below', 'beneath', 'beside',
-                     'between', 'for', 'given', 'instead of', 'on', 'onto',
-                     'out of', 'over', 'since')
-    Commands = ('ASCII (character|number)', 'activate', 'beep', 'choose URL',
-                'choose application', 'choose color', 'choose file( name)?',
-                'choose folder', 'choose from list',
-                'choose remote application', 'clipboard info',
-                'close( access)?', 'copy', 'count', 'current date', 'delay',
-                'delete', 'display (alert|dialog)', 'do shell script',
-                'duplicate', 'exists', 'get eof', 'get volume settings',
-                'info for', 'launch', 'list (disks|folder)', 'load script',
-                'log', 'make', 'mount volume', 'new', 'offset',
-                'open( (for access|location))?', 'path to', 'print', 'quit',
-                'random number', 'read', 'round', 'run( script)?',
-                'say', 'scripting components',
-                'set (eof|the clipboard to|volume)', 'store script',
-                'summarize', 'system attribute', 'system info',
-                'the clipboard', 'time to GMT', 'write', 'quoted form')
-    References = ('(in )?back of', '(in )?front of', '[0-9]+(st|nd|rd|th)',
-                  'first', 'second', 'third', 'fourth', 'fifth', 'sixth',
-                  'seventh', 'eighth', 'ninth', 'tenth', 'after', 'back',
-                  'before', 'behind', 'every', 'front', 'index', 'last',
-                  'middle', 'some', 'that', 'through', 'thru', 'where', 'whose')
-    Operators = ("and", "or", "is equal", "equals", "(is )?equal to", "is not",
-                 "isn't", "isn't equal( to)?", "is not equal( to)?",
-                 "doesn't equal", "does not equal", "(is )?greater than",
-                 "comes after", "is not less than or equal( to)?",
-                 "isn't less than or equal( to)?", "(is )?less than",
-                 "comes before", "is not greater than or equal( to)?",
-                 "isn't greater than or equal( to)?",
-                 "(is  )?greater than or equal( to)?", "is not less than",
-                 "isn't less than", "does not come before",
-                 "doesn't come before", "(is )?less than or equal( to)?",
-                 "is not greater than", "isn't greater than",
-                 "does not come after", "doesn't come after", "starts? with",
-                 "begins? with", "ends? with", "contains?", "does not contain",
-                 "doesn't contain", "is in", "is contained by", "is not in",
-                 "is not contained by", "isn't contained by", "div", "mod",
-                 "not", "(a  )?(ref( to)?|reference to)", "is", "does")
-    Control = ('considering', 'else', 'error', 'exit', 'from', 'if',
-               'ignoring', 'in', 'repeat', 'tell', 'then', 'times', 'to',
-               'try', 'until', 'using terms from', 'while', 'whith',
-               'with timeout( of)?', 'with transaction', 'by', 'continue',
-               'end', 'its?', 'me', 'my', 'return', 'of', 'as')
-    Declarations = ('global', 'local', 'prop(erty)?', 'set', 'get')
-    Reserved = ('but', 'put', 'returning', 'the')
-    StudioClasses = ('action cell', 'alert reply', 'application', 'box',
-                     'browser( cell)?', 'bundle', 'button( cell)?', 'cell',
-                     'clip view', 'color well', 'color-panel',
-                     'combo box( item)?', 'control',
-                     'data( (cell|column|item|row|source))?', 'default entry',
-                     'dialog reply', 'document', 'drag info', 'drawer',
-                     'event', 'font(-panel)?', 'formatter',
-                     'image( (cell|view))?', 'matrix', 'menu( item)?', 'item',
-                     'movie( view)?', 'open-panel', 'outline view', 'panel',
-                     'pasteboard', 'plugin', 'popup button',
-                     'progress indicator', 'responder', 'save-panel',
-                     'scroll view', 'secure text field( cell)?', 'slider',
-                     'sound', 'split view', 'stepper', 'tab view( item)?',
-                     'table( (column|header cell|header view|view))',
-                     'text( (field( cell)?|view))?', 'toolbar( item)?',
-                     'user-defaults', 'view', 'window')
-    StudioEvents = ('accept outline drop', 'accept table drop', 'action',
-                    'activated', 'alert ended', 'awake from nib', 'became key',
-                    'became main', 'begin editing', 'bounds changed',
-                    'cell value', 'cell value changed', 'change cell value',
-                    'change item value', 'changed', 'child of item',
-                    'choose menu item', 'clicked', 'clicked toolbar item',
-                    'closed', 'column clicked', 'column moved',
-                    'column resized', 'conclude drop', 'data representation',
-                    'deminiaturized', 'dialog ended', 'document nib name',
-                    'double clicked', 'drag( (entered|exited|updated))?',
-                    'drop', 'end editing', 'exposed', 'idle', 'item expandable',
-                    'item value', 'item value changed', 'items changed',
-                    'keyboard down', 'keyboard up', 'launched',
-                    'load data representation', 'miniaturized', 'mouse down',
-                    'mouse dragged', 'mouse entered', 'mouse exited',
-                    'mouse moved', 'mouse up', 'moved',
-                    'number of browser rows', 'number of items',
-                    'number of rows', 'open untitled', 'opened', 'panel ended',
-                    'parameters updated', 'plugin loaded', 'prepare drop',
-                    'prepare outline drag', 'prepare outline drop',
-                    'prepare table drag', 'prepare table drop',
-                    'read from file', 'resigned active', 'resigned key',
-                    'resigned main', 'resized( sub views)?',
-                    'right mouse down', 'right mouse dragged',
-                    'right mouse up', 'rows changed', 'scroll wheel',
-                    'selected tab view item', 'selection changed',
-                    'selection changing', 'should begin editing',
-                    'should close', 'should collapse item',
-                    'should end editing', 'should expand item',
-                    'should open( untitled)?',
-                    'should quit( after last window closed)?',
-                    'should select column', 'should select item',
-                    'should select row', 'should select tab view item',
-                    'should selection change', 'should zoom', 'shown',
-                    'update menu item', 'update parameters',
-                    'update toolbar item', 'was hidden', 'was miniaturized',
-                    'will become active', 'will close', 'will dismiss',
-                    'will display browser cell', 'will display cell',
-                    'will display item cell', 'will display outline cell',
-                    'will finish launching', 'will hide', 'will miniaturize',
-                    'will move', 'will open', 'will pop up', 'will quit',
-                    'will resign active', 'will resize( sub views)?',
-                    'will select tab view item', 'will show', 'will zoom',
-                    'write to file', 'zoomed')
-    StudioCommands = ('animate', 'append', 'call method', 'center',
-                      'close drawer', 'close panel', 'display',
-                      'display alert', 'display dialog', 'display panel', 'go',
-                      'hide', 'highlight', 'increment', 'item for',
-                      'load image', 'load movie', 'load nib', 'load panel',
-                      'load sound', 'localized string', 'lock focus', 'log',
-                      'open drawer', 'path for', 'pause', 'perform action',
-                      'play', 'register', 'resume', 'scroll', 'select( all)?',
-                      'show', 'size to fit', 'start', 'step back',
-                      'step forward', 'stop', 'synchronize', 'unlock focus',
-                      'update')
-    StudioProperties = ('accepts arrow key', 'action method', 'active',
-                        'alignment', 'allowed identifiers',
-                        'allows branch selection', 'allows column reordering',
-                        'allows column resizing', 'allows column selection',
-                        'allows customization',
-                        'allows editing text attributes',
-                        'allows empty selection', 'allows mixed state',
-                        'allows multiple selection', 'allows reordering',
-                        'allows undo', 'alpha( value)?', 'alternate image',
-                        'alternate increment value', 'alternate title',
-                        'animation delay', 'associated file name',
-                        'associated object', 'auto completes', 'auto display',
-                        'auto enables items', 'auto repeat',
-                        'auto resizes( outline column)?',
-                        'auto save expanded items', 'auto save name',
-                        'auto save table columns', 'auto saves configuration',
-                        'auto scroll', 'auto sizes all columns to fit',
-                        'auto sizes cells', 'background color', 'bezel state',
-                        'bezel style', 'bezeled', 'border rect', 'border type',
-                        'bordered', 'bounds( rotation)?', 'box type',
-                        'button returned', 'button type',
-                        'can choose directories', 'can choose files',
-                        'can draw', 'can hide',
-                        'cell( (background color|size|type))?', 'characters',
-                        'class', 'click count', 'clicked( data)? column',
-                        'clicked data item', 'clicked( data)? row',
-                        'closeable', 'collating', 'color( (mode|panel))',
-                        'command key down', 'configuration',
-                        'content(s| (size|view( margins)?))?', 'context',
-                        'continuous', 'control key down', 'control size',
-                        'control tint', 'control view',
-                        'controller visible', 'coordinate system',
-                        'copies( on scroll)?', 'corner view', 'current cell',
-                        'current column', 'current( field)?  editor',
-                        'current( menu)? item', 'current row',
-                        'current tab view item', 'data source',
-                        'default identifiers', 'delta (x|y|z)',
-                        'destination window', 'directory', 'display mode',
-                        'displayed cell', 'document( (edited|rect|view))?',
-                        'double value', 'dragged column', 'dragged distance',
-                        'dragged items', 'draws( cell)? background',
-                        'draws grid', 'dynamically scrolls', 'echos bullets',
-                        'edge', 'editable', 'edited( data)? column',
-                        'edited data item', 'edited( data)? row', 'enabled',
-                        'enclosing scroll view', 'ending page',
-                        'error handling', 'event number', 'event type',
-                        'excluded from windows menu', 'executable path',
-                        'expanded', 'fax number', 'field editor', 'file kind',
-                        'file name', 'file type', 'first responder',
-                        'first visible column', 'flipped', 'floating',
-                        'font( panel)?', 'formatter', 'frameworks path',
-                        'frontmost', 'gave up', 'grid color', 'has data items',
-                        'has horizontal ruler', 'has horizontal scroller',
-                        'has parent data item', 'has resize indicator',
-                        'has shadow', 'has sub menu', 'has vertical ruler',
-                        'has vertical scroller', 'header cell', 'header view',
-                        'hidden', 'hides when deactivated', 'highlights by',
-                        'horizontal line scroll', 'horizontal page scroll',
-                        'horizontal ruler view', 'horizontally resizable',
-                        'icon image', 'id', 'identifier',
-                        'ignores multiple clicks',
-                        'image( (alignment|dims when disabled|frame style|scaling))?',
-                        'imports graphics', 'increment value',
-                        'indentation per level', 'indeterminate', 'index',
-                        'integer value', 'intercell spacing', 'item height',
-                        'key( (code|equivalent( modifier)?|window))?',
-                        'knob thickness', 'label', 'last( visible)? column',
-                        'leading offset', 'leaf', 'level', 'line scroll',
-                        'loaded', 'localized sort', 'location', 'loop mode',
-                        'main( (bunde|menu|window))?', 'marker follows cell',
-                        'matrix mode', 'maximum( content)? size',
-                        'maximum visible columns',
-                        'menu( form representation)?', 'miniaturizable',
-                        'miniaturized', 'minimized image', 'minimized title',
-                        'minimum column width', 'minimum( content)? size',
-                        'modal', 'modified', 'mouse down state',
-                        'movie( (controller|file|rect))?', 'muted', 'name',
-                        'needs display', 'next state', 'next text',
-                        'number of tick marks', 'only tick mark values',
-                        'opaque', 'open panel', 'option key down',
-                        'outline table column', 'page scroll', 'pages across',
-                        'pages down', 'palette label', 'pane splitter',
-                        'parent data item', 'parent window', 'pasteboard',
-                        'path( (names|separator))?', 'playing',
-                        'plays every frame', 'plays selection only', 'position',
-                        'preferred edge', 'preferred type', 'pressure',
-                        'previous text', 'prompt', 'properties',
-                        'prototype cell', 'pulls down', 'rate',
-                        'released when closed', 'repeated',
-                        'requested print time', 'required file type',
-                        'resizable', 'resized column', 'resource path',
-                        'returns records', 'reuses columns', 'rich text',
-                        'roll over', 'row height', 'rulers visible',
-                        'save panel', 'scripts path', 'scrollable',
-                        'selectable( identifiers)?', 'selected cell',
-                        'selected( data)? columns?', 'selected data items?',
-                        'selected( data)? rows?', 'selected item identifier',
-                        'selection by rect', 'send action on arrow key',
-                        'sends action when done editing', 'separates columns',
-                        'separator item', 'sequence number', 'services menu',
-                        'shared frameworks path', 'shared support path',
-                        'sheet', 'shift key down', 'shows alpha',
-                        'shows state by', 'size( mode)?',
-                        'smart insert delete enabled', 'sort case sensitivity',
-                        'sort column', 'sort order', 'sort type',
-                        'sorted( data rows)?', 'sound', 'source( mask)?',
-                        'spell checking enabled', 'starting page', 'state',
-                        'string value', 'sub menu', 'super menu', 'super view',
-                        'tab key traverses cells', 'tab state', 'tab type',
-                        'tab view', 'table view', 'tag', 'target( printer)?',
-                        'text color', 'text container insert',
-                        'text container origin', 'text returned',
-                        'tick mark position', 'time stamp',
-                        'title(d| (cell|font|height|position|rect))?',
-                        'tool tip', 'toolbar', 'trailing offset', 'transparent',
-                        'treat packages as directories', 'truncated labels',
-                        'types', 'unmodified characters', 'update views',
-                        'use sort indicator', 'user defaults',
-                        'uses data source', 'uses ruler',
-                        'uses threaded animation',
-                        'uses title from previous column', 'value wraps',
-                        'version',
-                        'vertical( (line scroll|page scroll|ruler view))?',
-                        'vertically resizable', 'view',
-                        'visible( document rect)?', 'volume', 'width', 'window',
-                        'windows menu', 'wraps', 'zoomable', 'zoomed')
-
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            (r'¬\n', String.Escape),
-            (r"'s\s+", Text),  # This is a possessive, consider moving
-            (r'(--|#).*?$', Comment),
-            (r'\(\*', Comment.Multiline, 'comment'),
-            (r'[(){}!,.:]', Punctuation),
-            (r'(«)([^»]+)(»)',
-             bygroups(Text, Name.Builtin, Text)),
-            (r'\b((?:considering|ignoring)\s*)'
-             r'(application responses|case|diacriticals|hyphens|'
-             r'numeric strings|punctuation|white space)',
-             bygroups(Keyword, Name.Builtin)),
-            (r'(-|\*|\+|&|≠|>=?|<=?|=|≥|≤|/|÷|\^)', Operator),
-            (r"\b(%s)\b" % '|'.join(Operators), Operator.Word),
-            (r'^(\s*(?:on|end)\s+)'
-             r'(%s)' % '|'.join(StudioEvents[::-1]),
-             bygroups(Keyword, Name.Function)),
-            (r'^(\s*)(in|on|script|to)(\s+)', bygroups(Text, Keyword, Text)),
-            (r'\b(as )(%s)\b' % '|'.join(Classes),
-             bygroups(Keyword, Name.Class)),
-            (r'\b(%s)\b' % '|'.join(Literals), Name.Constant),
-            (r'\b(%s)\b' % '|'.join(Commands), Name.Builtin),
-            (r'\b(%s)\b' % '|'.join(Control), Keyword),
-            (r'\b(%s)\b' % '|'.join(Declarations), Keyword),
-            (r'\b(%s)\b' % '|'.join(Reserved), Name.Builtin),
-            (r'\b(%s)s?\b' % '|'.join(BuiltIn), Name.Builtin),
-            (r'\b(%s)\b' % '|'.join(HandlerParams), Name.Builtin),
-            (r'\b(%s)\b' % '|'.join(StudioProperties), Name.Attribute),
-            (r'\b(%s)s?\b' % '|'.join(StudioClasses), Name.Builtin),
-            (r'\b(%s)\b' % '|'.join(StudioCommands), Name.Builtin),
-            (r'\b(%s)\b' % '|'.join(References), Name.Builtin),
-            (r'"(\\\\|\\[^\\]|[^"\\])*"', String.Double),
-            (r'\b(%s)\b' % Identifiers, Name.Variable),
-            (r'[-+]?(\d+\.\d*|\d*\.\d+)(E[-+][0-9]+)?', Number.Float),
-            (r'[-+]?\d+', Number.Integer),
-        ],
-        'comment': [
-            (r'\(\*', Comment.Multiline, '#push'),
-            (r'\*\)', Comment.Multiline, '#pop'),
-            ('[^*(]+', Comment.Multiline),
-            ('[*(]', Comment.Multiline),
-        ],
-    }
-
-
class RexxLexer(RegexLexer):
    """
    Rexx is a scripting language available for
    a wide range of different platforms with its roots found on mainframe
    systems. It is popular for I/O- and data based tasks and can act as glue
    language to bind different applications together.

    .. versionadded:: 2.0
    """
    name = 'Rexx'
    url = 'http://www.rexxinfo.org/'
    aliases = ['rexx', 'arexx']
    filenames = ['*.rexx', '*.rex', '*.rx', '*.arexx']
    mimetypes = ['text/x-rexx']
    # Rexx is case-insensitive; all patterns below are written in lower case.
    flags = re.IGNORECASE

    tokens = {
        'root': [
            (r'\s+', Whitespace),
            # /* ... */ comments may nest via the 'comment' state.
            (r'/\*', Comment.Multiline, 'comment'),
            (r'"', String, 'string_double'),
            (r"'", String, 'string_single'),
            (r'[0-9]+(\.[0-9]+)?(e[+-]?[0-9])?', Number),
            # "name: procedure" declarations must be tried before plain labels.
            (r'([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b',
             bygroups(Name.Function, Whitespace, Operator, Whitespace,
                      Keyword.Declaration)),
            (r'([a-z_]\w*)(\s*)(:)',
             bygroups(Name.Label, Whitespace, Operator)),
            include('function'),
            include('keyword'),
            include('operator'),
            # Anything word-like that is not a keyword/function/label.
            (r'[a-z_]\w*', Text),
        ],
        # Built-in functions, recognized only when directly followed by '('.
        'function': [
            (words((
                'abbrev', 'abs', 'address', 'arg', 'b2x', 'bitand', 'bitor', 'bitxor',
                'c2d', 'c2x', 'center', 'charin', 'charout', 'chars', 'compare',
                'condition', 'copies', 'd2c', 'd2x', 'datatype', 'date', 'delstr',
                'delword', 'digits', 'errortext', 'form', 'format', 'fuzz', 'insert',
                'lastpos', 'left', 'length', 'linein', 'lineout', 'lines', 'max',
                'min', 'overlay', 'pos', 'queued', 'random', 'reverse', 'right', 'sign',
                'sourceline', 'space', 'stream', 'strip', 'substr', 'subword', 'symbol',
                'time', 'trace', 'translate', 'trunc', 'value', 'verify', 'word',
                'wordindex', 'wordlength', 'wordpos', 'words', 'x2b', 'x2c', 'x2d',
                'xrange'), suffix=r'(\s*)(\()'),
             bygroups(Name.Builtin, Whitespace, Operator)),
        ],
        'keyword': [
            (r'(address|arg|by|call|do|drop|else|end|exit|for|forever|if|'
             r'interpret|iterate|leave|nop|numeric|off|on|options|parse|'
             r'pull|push|queue|return|say|select|signal|to|then|trace|until|'
             r'while)\b', Keyword.Reserved),
        ],
        'operator': [
            # Includes the EBCDIC "not" sign (¬) variants used on mainframes.
            (r'(-|//|/|\(|\)|\*\*|\*|\\<<|\\<|\\==|\\=|\\>>|\\>|\\|\|\||\||'
             r'&&|&|%|\+|<<=|<<|<=|<>|<|==|=|><|>=|>>=|>>|>|¬<<|¬<|¬==|¬=|'
             r'¬>>|¬>|¬|\.|,)', Operator),
        ],
        'string_double': [
            (r'[^"\n]+', String),
            (r'""', String),  # doubled quote is an escaped quote
            (r'"', String, '#pop'),
            (r'\n', Text, '#pop'),  # Stray linefeed also terminates strings.
        ],
        'string_single': [
            (r'[^\'\n]+', String),
            (r'\'\'', String),  # doubled quote is an escaped quote
            (r'\'', String, '#pop'),
            (r'\n', Text, '#pop'),  # Stray linefeed also terminates strings.
        ],
        # Nested comment state: '(*' rules here are actually '/*'-style; the
        # state pushes itself on '(\*' so nesting pops balance out.
        'comment': [
            (r'[^*]+', Comment.Multiline),
            (r'\*/', Comment.Multiline, '#pop'),
            (r'\*', Comment.Multiline),
        ]
    }

    # Shorthand for compiling the heuristic patterns used by analyse_text.
    _c = lambda s: re.compile(s, re.MULTILINE)
    _ADDRESS_COMMAND_PATTERN = _c(r'^\s*address\s+command\b')
    _ADDRESS_PATTERN = _c(r'^\s*address\s+')
    _DO_WHILE_PATTERN = _c(r'^\s*do\s+while\b')
    _IF_THEN_DO_PATTERN = _c(r'^\s*if\b.+\bthen\s+do\s*$')
    _PROCEDURE_PATTERN = _c(r'^\s*([a-z_]\w*)(\s*)(:)(\s*)(procedure)\b')
    _ELSE_DO_PATTERN = _c(r'\belse\s+do\s*$')
    _PARSE_ARG_PATTERN = _c(r'^\s*parse\s+(upper\s+)?(arg|value)\b')
    # (pattern, weight) pairs; weights are summed in analyse_text and capped
    # at 1.0.  PROCEDURE is the strongest single indicator.
    PATTERNS_AND_WEIGHTS = (
        (_ADDRESS_COMMAND_PATTERN, 0.2),
        (_ADDRESS_PATTERN, 0.05),
        (_DO_WHILE_PATTERN, 0.1),
        (_ELSE_DO_PATTERN, 0.1),
        (_IF_THEN_DO_PATTERN, 0.1),
        (_PROCEDURE_PATTERN, 0.5),
        (_PARSE_ARG_PATTERN, 0.5),
    )

    def analyse_text(text):
        """
        Check for initial comment and patterns that distinguish Rexx from other
        C-like languages.

        Note: Pygments calls ``analyse_text`` unbound (no ``self``); returning
        ``None`` (i.e. falling off the end) counts as "no match".
        """
        if re.search(r'/\*\**\s*rexx', text, re.IGNORECASE):
            # Header matches MVS Rexx requirements, this is certainly a Rexx
            # script.
            return 1.0
        elif text.startswith('/*'):
            # Header matches general Rexx requirements; the source code might
            # still be any language using C comments such as C++, C# or Java.
            lowerText = text.lower()
            result = sum(weight
                         for (pattern, weight) in RexxLexer.PATTERNS_AND_WEIGHTS
                         if pattern.search(lowerText)) + 0.01
            return min(result, 1.0)
-
-
class MOOCodeLexer(RegexLexer):
    """
    For MOOCode (the MOO scripting language).

    .. versionadded:: 0.9
    """
    name = 'MOOCode'
    url = 'http://www.moo.mud.org/'
    filenames = ['*.moo']
    aliases = ['moocode', 'moo']
    mimetypes = ['text/x-moocode']

    # Single flat state; rule order matters (e.g. keywords before the
    # catch-all identifier rule at the bottom).
    tokens = {
        'root': [
            # Numbers
            (r'(0|[1-9][0-9_]*)', Number.Integer),
            # Strings
            (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
            # exceptions
            (r'(E_PERM|E_DIV)', Name.Exception),
            # db-refs (object numbers like #123 and $-prefixed corified refs)
            (r'((#[-0-9]+)|(\$\w+))', Name.Entity),
            # Keywords
            (r'\b(if|else|elseif|endif|for|endfor|fork|endfork|while'
             r'|endwhile|break|continue|return|try'
             r'|except|endtry|finally|in)\b', Keyword),
            # builtins
            (r'(random|length)', Name.Builtin),
            # special variables
            (r'(player|caller|this|args)', Name.Variable.Instance),
            # skip whitespace
            (r'\s+', Text),
            (r'\n', Text),
            # other operators
            (r'([!;=,{}&|:.\[\]@()<>?]+)', Operator),
            # function call
            (r'(\w+)(\()', bygroups(Name.Function, Operator)),
            # variables
            (r'(\w+)', Text),
        ]
    }
-
-
class HybrisLexer(RegexLexer):
    """
    For Hybris source code.

    .. versionadded:: 1.4
    """

    name = 'Hybris'
    aliases = ['hybris', 'hy']
    filenames = ['*.hy', '*.hyb']
    mimetypes = ['text/x-hybris', 'application/x-hybris']

    # DOTALL so that /* ... */ comments can span lines with a single rule.
    flags = re.MULTILINE | re.DOTALL

    tokens = {
        'root': [
            # method names: declaration keyword(s) + identifier + '('
            (r'^(\s*(?:function|method|operator\s+)+?)'
             r'([a-zA-Z_]\w*)'
             r'(\s*)(\()', bygroups(Keyword, Name.Function, Text, Operator)),
            (r'[^\S\n]+', Text),
            (r'//.*?\n', Comment.Single),
            (r'/\*.*?\*/', Comment.Multiline),
            (r'@[a-zA-Z_][\w.]*', Name.Decorator),
            (r'(break|case|catch|next|default|do|else|finally|for|foreach|of|'
             r'unless|if|new|return|switch|me|throw|try|while)\b', Keyword),
            (r'(extends|private|protected|public|static|throws|function|method|'
             r'operator)\b', Keyword.Declaration),
            (r'(true|false|null|__FILE__|__LINE__|__VERSION__|__LIB_PATH__|'
             r'__INC_PATH__)\b', Keyword.Constant),
            # 'class'/'struct' and 'import'/'include' push dedicated states so
            # the following name gets the right token type.
            (r'(class|struct)(\s+)',
             bygroups(Keyword.Declaration, Text), 'class'),
            (r'(import|include)(\s+)',
             bygroups(Keyword.Namespace, Text), 'import'),
            # Built-in function names of the Hybris standard library.
            (words((
                'gc_collect', 'gc_mm_items', 'gc_mm_usage', 'gc_collect_threshold',
                'urlencode', 'urldecode', 'base64encode', 'base64decode', 'sha1', 'crc32',
                'sha2', 'md5', 'md5_file', 'acos', 'asin', 'atan', 'atan2', 'ceil', 'cos',
                'cosh', 'exp', 'fabs', 'floor', 'fmod', 'log', 'log10', 'pow', 'sin',
                'sinh', 'sqrt', 'tan', 'tanh', 'isint', 'isfloat', 'ischar', 'isstring',
                'isarray', 'ismap', 'isalias', 'typeof', 'sizeof', 'toint', 'tostring',
                'fromxml', 'toxml', 'binary', 'pack', 'load', 'eval', 'var_names',
                'var_values', 'user_functions', 'dyn_functions', 'methods', 'call',
                'call_method', 'mknod', 'mkfifo', 'mount', 'umount2', 'umount', 'ticks',
                'usleep', 'sleep', 'time', 'strtime', 'strdate', 'dllopen', 'dlllink',
                'dllcall', 'dllcall_argv', 'dllclose', 'env', 'exec', 'fork', 'getpid',
                'wait', 'popen', 'pclose', 'exit', 'kill', 'pthread_create',
                'pthread_create_argv', 'pthread_exit', 'pthread_join', 'pthread_kill',
                'smtp_send', 'http_get', 'http_post', 'http_download', 'socket', 'bind',
                'listen', 'accept', 'getsockname', 'getpeername', 'settimeout', 'connect',
                'server', 'recv', 'send', 'close', 'print', 'println', 'printf', 'input',
                'readline', 'serial_open', 'serial_fcntl', 'serial_get_attr',
                'serial_get_ispeed', 'serial_get_ospeed', 'serial_set_attr',
                'serial_set_ispeed', 'serial_set_ospeed', 'serial_write', 'serial_read',
                'serial_close', 'xml_load', 'xml_parse', 'fopen', 'fseek', 'ftell',
                'fsize', 'fread', 'fwrite', 'fgets', 'fclose', 'file', 'readdir',
                'pcre_replace', 'size', 'pop', 'unmap', 'has', 'keys', 'values',
                'length', 'find', 'substr', 'replace', 'split', 'trim', 'remove',
                'contains', 'join'), suffix=r'\b'),
             Name.Builtin),
            # Built-in class/type names.
            (words((
                'MethodReference', 'Runner', 'Dll', 'Thread', 'Pipe', 'Process',
                'Runnable', 'CGI', 'ClientSocket', 'Socket', 'ServerSocket',
                'File', 'Console', 'Directory', 'Exception'), suffix=r'\b'),
             Keyword.Type),
            (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
            (r"'\\.'|'[^\\]'|'\\u[0-9a-f]{4}'", String.Char),
            (r'(\.)([a-zA-Z_]\w*)',
             bygroups(Operator, Name.Attribute)),
            (r'[a-zA-Z_]\w*:', Name.Label),
            (r'[a-zA-Z_$]\w*', Name),
            (r'[~^*!%&\[\](){}<>|+=:;,./?\-@]+', Operator),
            (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float),
            (r'0x[0-9a-f]+', Number.Hex),
            (r'[0-9]+L?', Number.Integer),
            (r'\n', Text),
        ],
        # One-shot states: consume the name after class/struct or
        # import/include, then return to 'root'.
        'class': [
            (r'[a-zA-Z_]\w*', Name.Class, '#pop')
        ],
        'import': [
            (r'[\w.]+\*?', Name.Namespace, '#pop')
        ],
    }

    def analyse_text(text):
        """public method and private method don't seem to be quite common
        elsewhere."""
        result = 0
        if re.search(r'\b(?:public|private)\s+method\b', text):
            result += 0.01
        return result
-
-
-
class EasytrieveLexer(RegexLexer):
    """
    Easytrieve Plus is a programming language for extracting, filtering and
    converting sequential data. Furthermore it can layout data for reports.
    It is mainly used on mainframe platforms and can access several of the
    mainframe's native file formats. It is somewhat comparable to awk.

    .. versionadded:: 2.1
    """
    name = 'Easytrieve'
    aliases = ['easytrieve']
    filenames = ['*.ezt', '*.mac']
    mimetypes = ['text/x-easytrieve']
    flags = 0

    # Note: We cannot use r'\b' at the start and end of keywords because
    # Easytrieve Plus delimiter characters are:
    #
    #   * space ( )
    #   * apostrophe (')
    #   * period (.)
    #   * comma (,)
    #   * parenthesis ( and )
    #   * colon (:)
    #
    # Additionally words end once a '*' appears, indicating a comment.
    _DELIMITERS = r' \'.,():\n'
    _DELIMITERS_OR_COMMENT = _DELIMITERS + '*'
    _DELIMITER_PATTERN = '[' + _DELIMITERS + ']'
    _DELIMITER_PATTERN_CAPTURE = '(' + _DELIMITER_PATTERN + ')'
    _NON_DELIMITER_OR_COMMENT_PATTERN = '[^' + _DELIMITERS_OR_COMMENT + ']'
    _OPERATORS_PATTERN = '[.+\\-/=\\[\\](){}<>;,&%¬]'
    _KEYWORDS = [
        'AFTER-BREAK', 'AFTER-LINE', 'AFTER-SCREEN', 'AIM', 'AND', 'ATTR',
        'BEFORE', 'BEFORE-BREAK', 'BEFORE-LINE', 'BEFORE-SCREEN', 'BUSHU',
        'BY', 'CALL', 'CASE', 'CHECKPOINT', 'CHKP', 'CHKP-STATUS', 'CLEAR',
        'CLOSE', 'COL', 'COLOR', 'COMMIT', 'CONTROL', 'COPY', 'CURSOR', 'D',
        'DECLARE', 'DEFAULT', 'DEFINE', 'DELETE', 'DENWA', 'DISPLAY', 'DLI',
        'DO', 'DUPLICATE', 'E', 'ELSE', 'ELSE-IF', 'END', 'END-CASE',
        'END-DO', 'END-IF', 'END-PROC', 'ENDPAGE', 'ENDTABLE', 'ENTER', 'EOF',
        'EQ', 'ERROR', 'EXIT', 'EXTERNAL', 'EZLIB', 'F1', 'F10', 'F11', 'F12',
        'F13', 'F14', 'F15', 'F16', 'F17', 'F18', 'F19', 'F2', 'F20', 'F21',
        'F22', 'F23', 'F24', 'F25', 'F26', 'F27', 'F28', 'F29', 'F3', 'F30',
        'F31', 'F32', 'F33', 'F34', 'F35', 'F36', 'F4', 'F5', 'F6', 'F7',
        'F8', 'F9', 'FETCH', 'FILE-STATUS', 'FILL', 'FINAL', 'FIRST',
        'FIRST-DUP', 'FOR', 'GE', 'GET', 'GO', 'GOTO', 'GQ', 'GR', 'GT',
        'HEADING', 'HEX', 'HIGH-VALUES', 'IDD', 'IDMS', 'IF', 'IN', 'INSERT',
        'JUSTIFY', 'KANJI-DATE', 'KANJI-DATE-LONG', 'KANJI-TIME', 'KEY',
        'KEY-PRESSED', 'KOKUGO', 'KUN', 'LAST-DUP', 'LE', 'LEVEL', 'LIKE',
        'LINE', 'LINE-COUNT', 'LINE-NUMBER', 'LINK', 'LIST', 'LOW-VALUES',
        'LQ', 'LS', 'LT', 'MACRO', 'MASK', 'MATCHED', 'MEND', 'MESSAGE',
        'MOVE', 'MSTART', 'NE', 'NEWPAGE', 'NOMASK', 'NOPRINT', 'NOT',
        'NOTE', 'NOVERIFY', 'NQ', 'NULL', 'OF', 'OR', 'OTHERWISE', 'PA1',
        'PA2', 'PA3', 'PAGE-COUNT', 'PAGE-NUMBER', 'PARM-REGISTER',
        'PATH-ID', 'PATTERN', 'PERFORM', 'POINT', 'POS', 'PRIMARY', 'PRINT',
        'PROCEDURE', 'PROGRAM', 'PUT', 'READ', 'RECORD', 'RECORD-COUNT',
        'RECORD-LENGTH', 'REFRESH', 'RELEASE', 'RENUM', 'REPEAT', 'REPORT',
        'REPORT-INPUT', 'RESHOW', 'RESTART', 'RETRIEVE', 'RETURN-CODE',
        'ROLLBACK', 'ROW', 'S', 'SCREEN', 'SEARCH', 'SECONDARY', 'SELECT',
        'SEQUENCE', 'SIZE', 'SKIP', 'SOKAKU', 'SORT', 'SQL', 'STOP', 'SUM',
        'SYSDATE', 'SYSDATE-LONG', 'SYSIN', 'SYSIPT', 'SYSLST', 'SYSPRINT',
        'SYSSNAP', 'SYSTIME', 'TALLY', 'TERM-COLUMNS', 'TERM-NAME',
        'TERM-ROWS', 'TERMINATION', 'TITLE', 'TO', 'TRANSFER', 'TRC',
        'UNIQUE', 'UNTIL', 'UPDATE', 'UPPERCASE', 'USER', 'USERID', 'VALUE',
        'VERIFY', 'W', 'WHEN', 'WHILE', 'WORK', 'WRITE', 'X', 'XDM', 'XRST'
    ]

    tokens = {
        'root': [
            (r'\*.*\n', Comment.Single),
            (r'\n+', Whitespace),
            # Macro argument, e.g. '&NAME.'; the trailing period pushes a
            # state so the text right after the argument is handled specially.
            (r'&' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+\.', Name.Variable,
             'after_macro_argument'),
            # Macro call
            (r'%' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name.Variable),
            (r'(FILE|MACRO|REPORT)(\s+)',
             bygroups(Keyword.Declaration, Whitespace), 'after_declaration'),
            (r'(JOB|PARM)' + r'(' + _DELIMITER_PATTERN + r')',
             bygroups(Keyword.Declaration, Operator)),
            (words(_KEYWORDS, suffix=_DELIMITER_PATTERN_CAPTURE),
             bygroups(Keyword.Reserved, Operator)),
            (_OPERATORS_PATTERN, Operator),
            # Procedure declaration
            (r'(' + _NON_DELIMITER_OR_COMMENT_PATTERN + r'+)(\s*)(\.?)(\s*)(PROC)(\s*\n)',
             bygroups(Name.Function, Whitespace, Operator, Whitespace,
                      Keyword.Declaration, Whitespace)),
            (r'[0-9]+\.[0-9]*', Number.Float),
            (r'[0-9]+', Number.Integer),
            (r"'(''|[^'])*'", String),
            (r'\s+', Whitespace),
            # Everything else just belongs to a name
            (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name),
         ],
        # Name immediately after FILE/MACRO/REPORT is the declared entity.
        'after_declaration': [
            (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name.Function),
            default('#pop'),
        ],
        # Token directly following a macro argument; each rule pops back.
        'after_macro_argument': [
            (r'\*.*\n', Comment.Single, '#pop'),
            (r'\s+', Whitespace, '#pop'),
            (_OPERATORS_PATTERN, Operator, '#pop'),
            (r"'(''|[^'])*'", String, '#pop'),
            # Everything else just belongs to a name
            (_NON_DELIMITER_OR_COMMENT_PATTERN + r'+', Name),
        ],
    }
    _COMMENT_LINE_REGEX = re.compile(r'^\s*\*')
    _MACRO_HEADER_REGEX = re.compile(r'^\s*MACRO')

    def analyse_text(text):
        """
        Perform a structural analysis for basic Easytrieve constructs.

        Returns a confidence between 0.0 and 1.0.  Pygments calls this
        unbound, i.e. without a ``self`` argument.
        """
        result = 0.0
        lines = text.split('\n')
        hasEndProc = False
        hasHeaderComment = False
        hasFile = False
        hasJob = False
        hasProc = False
        hasParm = False
        hasReport = False

        def isCommentLine(line):
            # Fix: test the line that was passed in; the original tested
            # lines[0] regardless of the argument.
            return EasytrieveLexer._COMMENT_LINE_REGEX.match(line) is not None

        def isEmptyLine(line):
            return not bool(line.strip())

        # Remove possible empty lines and header comments.
        while lines and (isEmptyLine(lines[0]) or isCommentLine(lines[0])):
            if not isEmptyLine(lines[0]):
                hasHeaderComment = True
            del lines[0]

        # Fix: guard against texts consisting only of blank/comment lines,
        # which previously raised IndexError on lines[0].
        if lines and EasytrieveLexer._MACRO_HEADER_REGEX.match(lines[0]):
            # Looks like an Easytrieve macro.
            result = 0.4
            if hasHeaderComment:
                result += 0.4
        else:
            # Scan the source for lines starting with indicators.
            for line in lines:
                words = line.split()
                if (len(words) >= 2):
                    firstWord = words[0]
                    if not hasReport:
                        if not hasJob:
                            if not hasFile:
                                if not hasParm:
                                    if firstWord == 'PARM':
                                        hasParm = True
                                if firstWord == 'FILE':
                                    hasFile = True
                            if firstWord == 'JOB':
                                hasJob = True
                        elif firstWord == 'PROC':
                            hasProc = True
                        elif firstWord == 'END-PROC':
                            hasEndProc = True
                        elif firstWord == 'REPORT':
                            hasReport = True

            # Weight the findings.
            if hasJob and (hasProc == hasEndProc):
                if hasHeaderComment:
                    result += 0.1
                if hasParm:
                    if hasProc:
                        # Found PARM, JOB and PROC/END-PROC:
                        # pretty sure this is Easytrieve.
                        result += 0.8
                    else:
                        # Found PARM and JOB: probably this is Easytrieve.
                        result += 0.5
                else:
                    # Found JOB and possibly other keywords: might be Easytrieve
                    result += 0.11
                    if hasParm:
                        # Note: PARM is not a proper English word, so this is
                        # regarded a much better indicator for Easytrieve than
                        # the other words.
                        result += 0.2
                    if hasFile:
                        result += 0.01
                    if hasReport:
                        result += 0.01
        assert 0.0 <= result <= 1.0
        return result
-
-
class JclLexer(RegexLexer):
    """
    Job Control Language (JCL)
    is a scripting language used on mainframe platforms to instruct the system
    on how to run a batch job or start a subsystem. It is somewhat
    comparable to MS DOS batch and Unix shell scripts.

    .. versionadded:: 2.1
    """
    name = 'JCL'
    aliases = ['jcl']
    filenames = ['*.jcl']
    mimetypes = ['text/x-jcl']
    flags = re.IGNORECASE

    tokens = {
        'root': [
            # '//*' introduces a comment line; plain '//' a JCL statement.
            (r'//\*.*\n', Comment.Single),
            (r'//', Keyword.Pseudo, 'statement'),
            (r'/\*', Keyword.Pseudo, 'jes2_statement'),
            # TODO: JES3 statement
            (r'.*\n', Other)  # Input text or inline code in any language.
        ],
        'statement': [
            (r'\s*\n', Whitespace, '#pop'),
            # 'label EXEC/JOB' form; other statements fall through below.
            (r'([a-z]\w*)(\s+)(exec|job)(\s*)',
             bygroups(Name.Label, Whitespace, Keyword.Reserved, Whitespace),
             'option'),
            (r'[a-z]\w*', Name.Variable, 'statement_command'),
            (r'\s+', Whitespace, 'statement_command'),
        ],
        'statement_command': [
            (r'\s+(command|cntl|dd|endctl|endif|else|include|jcllib|'
             r'output|pend|proc|set|then|xmit)\s+', Keyword.Reserved, 'option'),
            include('option')
        ],
        'jes2_statement': [
            (r'\s*\n', Whitespace, '#pop'),
            (r'\$', Keyword, 'option'),
            (r'\b(jobparam|message|netacct|notify|output|priority|route|'
             r'setup|signoff|xeq|xmit)\b', Keyword, 'option'),
        ],
        # Parameter list of a statement; a run of blanks switches to the
        # trailing-comment state.
        'option': [
            # (r'\n', Text, 'root'),
            (r'\*', Name.Builtin),
            (r'[\[\](){}<>;,]', Punctuation),
            (r'[-+*/=&%]', Operator),
            (r'[a-z_]\w*', Name),
            (r'\d+\.\d*', Number.Float),
            (r'\.\d+', Number.Float),
            (r'\d+', Number.Integer),
            (r"'", String, 'option_string'),
            (r'[ \t]+', Whitespace, 'option_comment'),
            (r'\.', Punctuation),
        ],
        # Quoted string; continuation cards start with '//' in column 1.
        'option_string': [
            (r"(\n)(//)", bygroups(Text, Keyword.Pseudo)),
            (r"''", String),  # doubled apostrophe is an escaped apostrophe
            (r"[^']", String),
            (r"'", String, '#pop'),
        ],
        # Everything after the parameters up to end of line is a comment.
        'option_comment': [
            # (r'\n', Text, 'root'),
            (r'.+', Comment.Single),
        ]
    }

    # A job card: '//' + 1-8 char name (national chars allowed) + 'JOB'.
    _JOB_HEADER_PATTERN = re.compile(r'^//[a-z#$@][a-z0-9#$@]{0,7}\s+job(\s+.*)?$',
                                     re.IGNORECASE)

    def analyse_text(text):
        """
        Recognize JCL job by header.

        Pygments calls this unbound (no ``self``); only the first line of
        *text* is examined.
        """
        result = 0.0
        lines = text.split('\n')
        if len(lines) > 0:
            if JclLexer._JOB_HEADER_PATTERN.match(lines[0]):
                result = 1.0
        assert 0.0 <= result <= 1.0
        return result
-
-
class MiniScriptLexer(RegexLexer):
    """
    For MiniScript source code.

    .. versionadded:: 2.6
    """

    name = 'MiniScript'
    url = 'https://miniscript.org'
    aliases = ['miniscript', 'ms']
    filenames = ['*.ms']
    # NOTE(review): 'text/x-minicript' looks like a typo for
    # 'text/x-miniscript', but it is the value shipped/registered here —
    # confirm with the MIME registry before changing it.
    mimetypes = ['text/x-minicript', 'application/x-miniscript']

    tokens = {
        # 'root' only handles an optional shebang, then defaults into 'base'.
        'root': [
            (r'#!(.*?)$', Comment.Preproc),
            default('base'),
        ],
        'base': [
            ('//.*$', Comment.Single),
            (r'(?i)(\d*\.\d+|\d+\.\d*)(e[+-]?\d+)?', Number),
            (r'(?i)\d+e[+-]?\d+', Number),
            (r'\d+', Number),
            (r'\n', Text),
            (r'[^\S\n]+', Text),
            (r'"', String, 'string_double'),
            (r'(==|!=|<=|>=|[=+\-*/%^<>.:])', Operator),
            (r'[;,\[\]{}()]', Punctuation),
            (words((
                'break', 'continue', 'else', 'end', 'for', 'function', 'if',
                'in', 'isa', 'then', 'repeat', 'return', 'while'), suffix=r'\b'),
             Keyword),
            (words((
                'abs', 'acos', 'asin', 'atan', 'ceil', 'char', 'cos', 'floor',
                'log', 'round', 'rnd', 'pi', 'sign', 'sin', 'sqrt', 'str', 'tan',
                'hasIndex', 'indexOf', 'len', 'val', 'code', 'remove', 'lower',
                'upper', 'replace', 'split', 'indexes', 'values', 'join', 'sum',
                'sort', 'shuffle', 'push', 'pop', 'pull', 'range',
                'print', 'input', 'time', 'wait', 'locals', 'globals', 'outer',
                'yield'), suffix=r'\b'),
             Name.Builtin),
            (r'(true|false|null)\b', Keyword.Constant),
            (r'(and|or|not|new)\b', Operator.Word),
            (r'(self|super|__isa)\b', Name.Builtin.Pseudo),
            # Fallback: any remaining identifier is a variable.
            (r'[a-zA-Z_]\w*', Name.Variable)
        ],
        'string_double': [
            (r'[^"\n]+', String),
            (r'""', String),  # doubled quote is an escaped quote
            (r'"', String, '#pop'),
            (r'\n', Text, '#pop'),  # Stray linefeed also terminates strings.
        ]
    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/sgf.py b/venv/lib/python3.11/site-packages/pygments/lexers/sgf.py
deleted file mode 100644
index 0fad263..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/sgf.py
+++ /dev/null
@@ -1,60 +0,0 @@
-"""
-    pygments.lexers.sgf
-    ~~~~~~~~~~~~~~~~~~~
-
-    Lexer for Smart Game Format (sgf) file format.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Name, Literal, String, Punctuation, Whitespace
-
-__all__ = ["SmartGameFormatLexer"]
-
-
class SmartGameFormatLexer(RegexLexer):
    """
    Lexer for Smart Game Format (sgf) file format.

    The format is used to store game records of board games for two players
    (mainly Go game).

    .. versionadded:: 2.4
    """
    name = 'SmartGameFormat'
    url = 'https://www.red-bean.com/sgf/'
    aliases = ['sgf']
    filenames = ['*.sgf']

    tokens = {
        'root': [
            # Game-tree structure characters.
            (r'[():;]+', Punctuation),
            # tokens (SGF property identifiers).
            # NOTE(review): 'SO' appears twice in this alternation; the
            # duplicate is harmless but redundant — confirm before removing.
            (r'(A[BW]|AE|AN|AP|AR|AS|[BW]L|BM|[BW]R|[BW]S|[BW]T|CA|CH|CP|CR|'
             r'DD|DM|DO|DT|EL|EV|EX|FF|FG|G[BW]|GC|GM|GN|HA|HO|ID|IP|IT|IY|KM|'
             r'KO|LB|LN|LT|L|MA|MN|M|N|OB|OM|ON|OP|OT|OV|P[BW]|PC|PL|PM|RE|RG|'
             r'RO|RU|SO|SC|SE|SI|SL|SO|SQ|ST|SU|SZ|T[BW]|TC|TE|TM|TR|UC|US|VW|'
             r'V|[BW]|C)',
             Name.Builtin),
            # number:
            (r'(\[)([0-9.]+)(\])',
             bygroups(Punctuation, Literal.Number, Punctuation)),
            # date:
            (r'(\[)([0-9]{4}-[0-9]{2}-[0-9]{2})(\])',
             bygroups(Punctuation, Literal.Date, Punctuation)),
            # point (board coordinate, two lower-case letters):
            (r'(\[)([a-z]{2})(\])',
             bygroups(Punctuation, String, Punctuation)),
            # double points (compressed point list 'aa:bb'):
            (r'(\[)([a-z]{2})(:)([a-z]{2})(\])',
             bygroups(Punctuation, String, Punctuation, String, Punctuation)),
            # Generic property value (free text).
            (r'(\[)([\w\s#()+,\-.:?]+)(\])',
             bygroups(Punctuation, String, Punctuation)),
            # Whitespace-leading bracketed value.
            (r'(\[)(\s.*)(\])',
             bygroups(Punctuation, Whitespace, Punctuation)),
            (r'\s+', Whitespace)
        ],
    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/shell.py b/venv/lib/python3.11/site-packages/pygments/lexers/shell.py
deleted file mode 100644
index eabf4ec..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/shell.py
+++ /dev/null
@@ -1,920 +0,0 @@
-"""
-    pygments.lexers.shell
-    ~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for various shells.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import re
-
-from pygments.lexer import Lexer, RegexLexer, do_insertions, bygroups, \
-    include, default, this, using, words, line_re
-from pygments.token import Punctuation, Whitespace, \
-    Text, Comment, Operator, Keyword, Name, String, Number, Generic
-from pygments.util import shebang_matches
-
-__all__ = ['BashLexer', 'BashSessionLexer', 'TcshLexer', 'BatchLexer',
-           'SlurmBashLexer', 'MSDOSSessionLexer', 'PowerShellLexer',
-           'PowerShellSessionLexer', 'TcshSessionLexer', 'FishShellLexer',
-           'ExeclineLexer']
-
-
class BashLexer(RegexLexer):
    """
    Lexer for (ba|k|z|)sh shell scripts.

    .. versionadded:: 0.6
    """

    name = 'Bash'
    aliases = ['bash', 'sh', 'ksh', 'zsh', 'shell']
    filenames = ['*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass',
                 '*.exheres-0', '*.exlib', '*.zsh',
                 '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc',
                 '.kshrc', 'kshrc',
                 'PKGBUILD']
    mimetypes = ['application/x-sh', 'application/x-shellscript', 'text/x-shellscript']

    tokens = {
        # 'root' chains the sub-states; the order (basic, backticks, data,
        # interp) determines matching priority.
        'root': [
            include('basic'),
            (r'`', String.Backtick, 'backticks'),
            include('data'),
            include('interp'),
        ],
        # Dollar expansions: $((...)), $(...), ${...} and plain variables.
        'interp': [
            (r'\$\(\(', Keyword, 'math'),
            (r'\$\(', Keyword, 'paren'),
            (r'\$\{#?', String.Interpol, 'curly'),
            (r'\$[a-zA-Z_]\w*', Name.Variable),  # user variable
            (r'\$(?:\d+|[#$?!_*@-])', Name.Variable),      # builtin
            (r'\$', Text),
        ],
        # Keywords, builtins, comments, assignments and redirections.
        'basic': [
            (r'\b(if|fi|else|while|in|do|done|for|then|return|function|case|'
             r'select|break|continue|until|esac|elif)(\s*)\b',
             bygroups(Keyword, Whitespace)),
            # Builtins only count when followed by whitespace, ')' or '`'.
            (r'\b(alias|bg|bind|builtin|caller|cd|command|compgen|'
             r'complete|declare|dirs|disown|echo|enable|eval|exec|exit|'
             r'export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|'
             r'local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|'
             r'shopt|source|suspend|test|time|times|trap|true|type|typeset|'
             r'ulimit|umask|unalias|unset|wait)(?=[\s)`])',
             Name.Builtin),
            (r'\A#!.+\n', Comment.Hashbang),
            (r'#.*\n', Comment.Single),
            (r'\\[\w\W]', String.Escape),
            (r'(\b\w+)(\s*)(\+?=)', bygroups(Name.Variable, Whitespace, Operator)),
            (r'[\[\]{}()=]', Operator),
            (r'<<<', Operator),  # here-string
            # Heredoc: optionally quoted delimiter, matched again by \2.
            (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
            (r'&&|\|\|', Operator),
        ],
        # Literals and word-level text.
        'data': [
            (r'(?s)\$?"(\\.|[^"\\$])*"', String.Double),
            (r'"', String.Double, 'string'),
            (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
            (r"(?s)'.*?'", String.Single),
            (r';', Punctuation),
            (r'&', Punctuation),
            (r'\|', Punctuation),
            (r'\s+', Whitespace),
            (r'\d+\b', Number),
            (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
            (r'<', Text),
        ],
        # Inside a double-quoted string with interpolation.
        'string': [
            (r'"', String.Double, '#pop'),
            (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
            include('interp'),
        ],
        # Inside ${...} parameter expansion.
        'curly': [
            (r'\}', String.Interpol, '#pop'),
            (r':-', Keyword),
            (r'\w+', Name.Variable),
            (r'[^}:"\'`$\\]+', Punctuation),
            (r':', Punctuation),
            include('root'),
        ],
        # Inside $(...) command substitution.
        'paren': [
            (r'\)', Keyword, '#pop'),
            include('root'),
        ],
        # Inside $((...)) arithmetic expansion.
        'math': [
            (r'\)\)', Keyword, '#pop'),
            (r'\*\*|\|\||<<|>>|[-+*/%^|&<>]', Operator),
            (r'\d+#[\da-zA-Z]+', Number),  # base#value literal
            (r'\d+#(?! )', Number),
            (r'0[xX][\da-fA-F]+', Number),
            (r'\d+', Number),
            (r'[a-zA-Z_]\w*', Name.Variable),  # user variable
            include('root'),
        ],
        # Inside `...` command substitution.
        'backticks': [
            (r'`', String.Backtick, '#pop'),
            include('root'),
        ],
    }

    def analyse_text(text):
        # Called unbound by pygments; returning None means "no opinion".
        if shebang_matches(text, r'(ba|z|)sh'):
            return 1
        if text.startswith('$ '):
            return 0.2
-
-
class SlurmBashLexer(BashLexer):
    """
    Lexer for (ba|k|z|)sh Slurm scripts.

    .. versionadded:: 2.4
    """

    name = 'Slurm'
    aliases = ['slurm', 'sbatch']
    filenames = ['*.sl']
    mimetypes = []
    EXTRA_KEYWORDS = {'srun'}

    def get_tokens_unprocessed(self, text):
        """Post-process the Bash token stream for Slurm scripts.

        Slurm commands (``EXTRA_KEYWORDS``) are promoted from plain text to
        builtins, and ``#SBATCH`` directive comments are highlighted as
        pseudo-keywords; every other token passes through unchanged.
        """
        for pos, ttype, value in BashLexer.get_tokens_unprocessed(self, text):
            if ttype is Text and value in self.EXTRA_KEYWORDS:
                ttype = Name.Builtin
            elif ttype is Comment.Single and 'SBATCH' in value:
                ttype = Keyword.Pseudo
            yield pos, ttype, value
-
-
-class ShellSessionBaseLexer(Lexer):
-    """
-    Base lexer for shell sessions.
-
-    .. versionadded:: 2.1
-    """
-
-    _bare_continuation = False
-    _venv = re.compile(r'^(\([^)]*\))(\s*)')
-
-    def get_tokens_unprocessed(self, text):
-        innerlexer = self._innerLexerCls(**self.options)
-
-        pos = 0
-        curcode = ''
-        insertions = []
-        backslash_continuation = False
-
-        for match in line_re.finditer(text):
-            line = match.group()
-
-            venv_match = self._venv.match(line)
-            if venv_match:
-                venv = venv_match.group(1)
-                venv_whitespace = venv_match.group(2)
-                insertions.append((len(curcode),
-                                   [(0, Generic.Prompt.VirtualEnv, venv)]))
-                if venv_whitespace:
-                    insertions.append((len(curcode),
-                                       [(0, Text, venv_whitespace)]))
-                line = line[venv_match.end():]
-
-            m = self._ps1rgx.match(line)
-            if m:
-                # To support output lexers (say diff output), the output
-                # needs to be broken by prompts whenever the output lexer
-                # changes.
-                if not insertions:
-                    pos = match.start()
-
-                insertions.append((len(curcode),
-                                   [(0, Generic.Prompt, m.group(1))]))
-                curcode += m.group(2)
-                backslash_continuation = curcode.endswith('\\\n')
-            elif backslash_continuation:
-                if line.startswith(self._ps2):
-                    insertions.append((len(curcode),
-                                       [(0, Generic.Prompt,
-                                         line[:len(self._ps2)])]))
-                    curcode += line[len(self._ps2):]
-                else:
-                    curcode += line
-                backslash_continuation = curcode.endswith('\\\n')
-            elif self._bare_continuation and line.startswith(self._ps2):
-                insertions.append((len(curcode),
-                                   [(0, Generic.Prompt,
-                                     line[:len(self._ps2)])]))
-                curcode += line[len(self._ps2):]
-            else:
-                if insertions:
-                    toks = innerlexer.get_tokens_unprocessed(curcode)
-                    for i, t, v in do_insertions(insertions, toks):
-                        yield pos+i, t, v
-                yield match.start(), Generic.Output, line
-                insertions = []
-                curcode = ''
-        if insertions:
-            for i, t, v in do_insertions(insertions,
-                                         innerlexer.get_tokens_unprocessed(curcode)):
-                yield pos+i, t, v
-
-
-class BashSessionLexer(ShellSessionBaseLexer):
-    """
-    Lexer for Bash shell sessions, i.e. command lines, including a
-    prompt, interspersed with output.
-
-    .. versionadded:: 1.1
-    """
-
-    name = 'Bash Session'
-    aliases = ['console', 'shell-session']
-    filenames = ['*.sh-session', '*.shell-session']
-    mimetypes = ['application/x-shell-session', 'application/x-sh-session']
-
-    _innerLexerCls = BashLexer
-    _ps1rgx = re.compile(
-        r'^((?:(?:\[.*?\])|(?:\(\S+\))?(?:| |sh\S*?|\w+\S+[@:]\S+(?:\s+\S+)' \
-        r'?|\[\S+[@:][^\n]+\].+))\s*[$#%]\s*)(.*\n?)')
-    _ps2 = '> '
-
-
-class BatchLexer(RegexLexer):
-    """
-    Lexer for the DOS/Windows Batch file format.
-
-    .. versionadded:: 0.7
-    """
-    name = 'Batchfile'
-    aliases = ['batch', 'bat', 'dosbatch', 'winbatch']
-    filenames = ['*.bat', '*.cmd']
-    mimetypes = ['application/x-dos-batch']
-
-    flags = re.MULTILINE | re.IGNORECASE
-
-    _nl = r'\n\x1a'
-    _punct = r'&<>|'
-    _ws = r'\t\v\f\r ,;=\xa0'
-    _nlws = r'\s\x1a\xa0,;='
-    _space = r'(?:(?:(?:\^[%s])?[%s])+)' % (_nl, _ws)
-    _keyword_terminator = (r'(?=(?:\^[%s]?)?[%s+./:[\\\]]|[%s%s(])' %
-                           (_nl, _ws, _nl, _punct))
-    _token_terminator = r'(?=\^?[%s]|[%s%s])' % (_ws, _punct, _nl)
-    _start_label = r'((?:(?<=^[^:])|^[^:]?)[%s]*)(:)' % _ws
-    _label = r'(?:(?:[^%s%s+:^]|\^[%s]?[\w\W])*)' % (_nlws, _punct, _nl)
-    _label_compound = r'(?:(?:[^%s%s+:^)]|\^[%s]?[^)])*)' % (_nlws, _punct, _nl)
-    _number = r'(?:-?(?:0[0-7]+|0x[\da-f]+|\d+)%s)' % _token_terminator
-    _opword = r'(?:equ|geq|gtr|leq|lss|neq)'
-    _string = r'(?:"[^%s"]*(?:"|(?=[%s])))' % (_nl, _nl)
-    _variable = (r'(?:(?:%%(?:\*|(?:~[a-z]*(?:\$[^:]+:)?)?\d|'
-                 r'[^%%:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:[^%%%s^]|'
-                 r'\^[^%%%s])[^=%s]*=(?:[^%%%s^]|\^[^%%%s])*)?)?%%))|'
-                 r'(?:\^?![^!:%s]+(?::(?:~(?:-?\d+)?(?:,(?:-?\d+)?)?|(?:'
-                 r'[^!%s^]|\^[^!%s])[^=%s]*=(?:[^!%s^]|\^[^!%s])*)?)?\^?!))' %
-                 (_nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl, _nl))
-    _core_token = r'(?:(?:(?:\^[%s]?)?[^"%s%s])+)' % (_nl, _nlws, _punct)
-    _core_token_compound = r'(?:(?:(?:\^[%s]?)?[^"%s%s)])+)' % (_nl, _nlws, _punct)
-    _token = r'(?:[%s]+|%s)' % (_punct, _core_token)
-    _token_compound = r'(?:[%s]+|%s)' % (_punct, _core_token_compound)
-    _stoken = (r'(?:[%s]+|(?:%s|%s|%s)+)' %
-               (_punct, _string, _variable, _core_token))
-
-    def _make_begin_state(compound, _core_token=_core_token,
-                          _core_token_compound=_core_token_compound,
-                          _keyword_terminator=_keyword_terminator,
-                          _nl=_nl, _punct=_punct, _string=_string,
-                          _space=_space, _start_label=_start_label,
-                          _stoken=_stoken, _token_terminator=_token_terminator,
-                          _variable=_variable, _ws=_ws):
-        rest = '(?:%s|%s|[^"%%%s%s%s])*' % (_string, _variable, _nl, _punct,
-                                            ')' if compound else '')
-        rest_of_line = r'(?:(?:[^%s^]|\^[%s]?[\w\W])*)' % (_nl, _nl)
-        rest_of_line_compound = r'(?:(?:[^%s^)]|\^[%s]?[^)])*)' % (_nl, _nl)
-        set_space = r'((?:(?:\^[%s]?)?[^\S\n])*)' % _nl
-        suffix = ''
-        if compound:
-            _keyword_terminator = r'(?:(?=\))|%s)' % _keyword_terminator
-            _token_terminator = r'(?:(?=\))|%s)' % _token_terminator
-            suffix = '/compound'
-        return [
-            ((r'\)', Punctuation, '#pop') if compound else
-             (r'\)((?=\()|%s)%s' % (_token_terminator, rest_of_line),
-              Comment.Single)),
-            (r'(?=%s)' % _start_label, Text, 'follow%s' % suffix),
-            (_space, using(this, state='text')),
-            include('redirect%s' % suffix),
-            (r'[%s]+' % _nl, Text),
-            (r'\(', Punctuation, 'root/compound'),
-            (r'@+', Punctuation),
-            (r'((?:for|if|rem)(?:(?=(?:\^[%s]?)?/)|(?:(?!\^)|'
-             r'(?<=m))(?:(?=\()|%s)))(%s?%s?(?:\^[%s]?)?/(?:\^[%s]?)?\?)' %
-             (_nl, _token_terminator, _space,
-              _core_token_compound if compound else _core_token, _nl, _nl),
-             bygroups(Keyword, using(this, state='text')),
-             'follow%s' % suffix),
-            (r'(goto%s)(%s(?:\^[%s]?)?/(?:\^[%s]?)?\?%s)' %
-             (_keyword_terminator, rest, _nl, _nl, rest),
-             bygroups(Keyword, using(this, state='text')),
-             'follow%s' % suffix),
-            (words(('assoc', 'break', 'cd', 'chdir', 'cls', 'color', 'copy',
-                    'date', 'del', 'dir', 'dpath', 'echo', 'endlocal', 'erase',
-                    'exit', 'ftype', 'keys', 'md', 'mkdir', 'mklink', 'move',
-                    'path', 'pause', 'popd', 'prompt', 'pushd', 'rd', 'ren',
-                    'rename', 'rmdir', 'setlocal', 'shift', 'start', 'time',
-                    'title', 'type', 'ver', 'verify', 'vol'),
-                   suffix=_keyword_terminator), Keyword, 'follow%s' % suffix),
-            (r'(call)(%s?)(:)' % _space,
-             bygroups(Keyword, using(this, state='text'), Punctuation),
-             'call%s' % suffix),
-            (r'call%s' % _keyword_terminator, Keyword),
-            (r'(for%s(?!\^))(%s)(/f%s)' %
-             (_token_terminator, _space, _token_terminator),
-             bygroups(Keyword, using(this, state='text'), Keyword),
-             ('for/f', 'for')),
-            (r'(for%s(?!\^))(%s)(/l%s)' %
-             (_token_terminator, _space, _token_terminator),
-             bygroups(Keyword, using(this, state='text'), Keyword),
-             ('for/l', 'for')),
-            (r'for%s(?!\^)' % _token_terminator, Keyword, ('for2', 'for')),
-            (r'(goto%s)(%s?)(:?)' % (_keyword_terminator, _space),
-             bygroups(Keyword, using(this, state='text'), Punctuation),
-             'label%s' % suffix),
-            (r'(if(?:(?=\()|%s)(?!\^))(%s?)((?:/i%s)?)(%s?)((?:not%s)?)(%s?)' %
-             (_token_terminator, _space, _token_terminator, _space,
-              _token_terminator, _space),
-             bygroups(Keyword, using(this, state='text'), Keyword,
-                      using(this, state='text'), Keyword,
-                      using(this, state='text')), ('(?', 'if')),
-            (r'rem(((?=\()|%s)%s?%s?.*|%s%s)' %
-             (_token_terminator, _space, _stoken, _keyword_terminator,
-              rest_of_line_compound if compound else rest_of_line),
-             Comment.Single, 'follow%s' % suffix),
-            (r'(set%s)%s(/a)' % (_keyword_terminator, set_space),
-             bygroups(Keyword, using(this, state='text'), Keyword),
-             'arithmetic%s' % suffix),
-            (r'(set%s)%s((?:/p)?)%s((?:(?:(?:\^[%s]?)?[^"%s%s^=%s]|'
-             r'\^[%s]?[^"=])+)?)((?:(?:\^[%s]?)?=)?)' %
-             (_keyword_terminator, set_space, set_space, _nl, _nl, _punct,
-              ')' if compound else '', _nl, _nl),
-             bygroups(Keyword, using(this, state='text'), Keyword,
-                      using(this, state='text'), using(this, state='variable'),
-                      Punctuation),
-             'follow%s' % suffix),
-            default('follow%s' % suffix)
-        ]
-
-    def _make_follow_state(compound, _label=_label,
-                           _label_compound=_label_compound, _nl=_nl,
-                           _space=_space, _start_label=_start_label,
-                           _token=_token, _token_compound=_token_compound,
-                           _ws=_ws):
-        suffix = '/compound' if compound else ''
-        state = []
-        if compound:
-            state.append((r'(?=\))', Text, '#pop'))
-        state += [
-            (r'%s([%s]*)(%s)(.*)' %
-             (_start_label, _ws, _label_compound if compound else _label),
-             bygroups(Text, Punctuation, Text, Name.Label, Comment.Single)),
-            include('redirect%s' % suffix),
-            (r'(?=[%s])' % _nl, Text, '#pop'),
-            (r'\|\|?|&&?', Punctuation, '#pop'),
-            include('text')
-        ]
-        return state
-
-    def _make_arithmetic_state(compound, _nl=_nl, _punct=_punct,
-                               _string=_string, _variable=_variable,
-                               _ws=_ws, _nlws=_nlws):
-        op = r'=+\-*/!~'
-        state = []
-        if compound:
-            state.append((r'(?=\))', Text, '#pop'))
-        state += [
-            (r'0[0-7]+', Number.Oct),
-            (r'0x[\da-f]+', Number.Hex),
-            (r'\d+', Number.Integer),
-            (r'[(),]+', Punctuation),
-            (r'([%s]|%%|\^\^)+' % op, Operator),
-            (r'(%s|%s|(\^[%s]?)?[^()%s%%\^"%s%s]|\^[%s]?%s)+' %
-             (_string, _variable, _nl, op, _nlws, _punct, _nlws,
-              r'[^)]' if compound else r'[\w\W]'),
-             using(this, state='variable')),
-            (r'(?=[\x00|&])', Text, '#pop'),
-            include('follow')
-        ]
-        return state
-
-    def _make_call_state(compound, _label=_label,
-                         _label_compound=_label_compound):
-        state = []
-        if compound:
-            state.append((r'(?=\))', Text, '#pop'))
-        state.append((r'(:?)(%s)' % (_label_compound if compound else _label),
-                      bygroups(Punctuation, Name.Label), '#pop'))
-        return state
-
-    def _make_label_state(compound, _label=_label,
-                          _label_compound=_label_compound, _nl=_nl,
-                          _punct=_punct, _string=_string, _variable=_variable):
-        state = []
-        if compound:
-            state.append((r'(?=\))', Text, '#pop'))
-        state.append((r'(%s?)((?:%s|%s|\^[%s]?%s|[^"%%^%s%s%s])*)' %
-                      (_label_compound if compound else _label, _string,
-                       _variable, _nl, r'[^)]' if compound else r'[\w\W]', _nl,
-                       _punct, r')' if compound else ''),
-                      bygroups(Name.Label, Comment.Single), '#pop'))
-        return state
-
-    def _make_redirect_state(compound,
-                             _core_token_compound=_core_token_compound,
-                             _nl=_nl, _punct=_punct, _stoken=_stoken,
-                             _string=_string, _space=_space,
-                             _variable=_variable, _nlws=_nlws):
-        stoken_compound = (r'(?:[%s]+|(?:%s|%s|%s)+)' %
-                           (_punct, _string, _variable, _core_token_compound))
-        return [
-            (r'((?:(?<=[%s])\d)?)(>>?&|<&)([%s]*)(\d)' %
-             (_nlws, _nlws),
-             bygroups(Number.Integer, Punctuation, Text, Number.Integer)),
-            (r'((?:(?<=[%s])(?>?|<)(%s?%s)' %
-             (_nlws, _nl, _space, stoken_compound if compound else _stoken),
-             bygroups(Number.Integer, Punctuation, using(this, state='text')))
-        ]
-
-    tokens = {
-        'root': _make_begin_state(False),
-        'follow': _make_follow_state(False),
-        'arithmetic': _make_arithmetic_state(False),
-        'call': _make_call_state(False),
-        'label': _make_label_state(False),
-        'redirect': _make_redirect_state(False),
-        'root/compound': _make_begin_state(True),
-        'follow/compound': _make_follow_state(True),
-        'arithmetic/compound': _make_arithmetic_state(True),
-        'call/compound': _make_call_state(True),
-        'label/compound': _make_label_state(True),
-        'redirect/compound': _make_redirect_state(True),
-        'variable-or-escape': [
-            (_variable, Name.Variable),
-            (r'%%%%|\^[%s]?(\^!|[\w\W])' % _nl, String.Escape)
-        ],
-        'string': [
-            (r'"', String.Double, '#pop'),
-            (_variable, Name.Variable),
-            (r'\^!|%%', String.Escape),
-            (r'[^"%%^%s]+|[%%^]' % _nl, String.Double),
-            default('#pop')
-        ],
-        'sqstring': [
-            include('variable-or-escape'),
-            (r'[^%]+|%', String.Single)
-        ],
-        'bqstring': [
-            include('variable-or-escape'),
-            (r'[^%]+|%', String.Backtick)
-        ],
-        'text': [
-            (r'"', String.Double, 'string'),
-            include('variable-or-escape'),
-            (r'[^"%%^%s%s\d)]+|.' % (_nlws, _punct), Text)
-        ],
-        'variable': [
-            (r'"', String.Double, 'string'),
-            include('variable-or-escape'),
-            (r'[^"%%^%s]+|.' % _nl, Name.Variable)
-        ],
-        'for': [
-            (r'(%s)(in)(%s)(\()' % (_space, _space),
-             bygroups(using(this, state='text'), Keyword,
-                      using(this, state='text'), Punctuation), '#pop'),
-            include('follow')
-        ],
-        'for2': [
-            (r'\)', Punctuation),
-            (r'(%s)(do%s)' % (_space, _token_terminator),
-             bygroups(using(this, state='text'), Keyword), '#pop'),
-            (r'[%s]+' % _nl, Text),
-            include('follow')
-        ],
-        'for/f': [
-            (r'(")((?:%s|[^"])*?")([%s]*)(\))' % (_variable, _nlws),
-             bygroups(String.Double, using(this, state='string'), Text,
-                      Punctuation)),
-            (r'"', String.Double, ('#pop', 'for2', 'string')),
-            (r"('(?:%%%%|%s|[\w\W])*?')([%s]*)(\))" % (_variable, _nlws),
-             bygroups(using(this, state='sqstring'), Text, Punctuation)),
-            (r'(`(?:%%%%|%s|[\w\W])*?`)([%s]*)(\))' % (_variable, _nlws),
-             bygroups(using(this, state='bqstring'), Text, Punctuation)),
-            include('for2')
-        ],
-        'for/l': [
-            (r'-?\d+', Number.Integer),
-            include('for2')
-        ],
-        'if': [
-            (r'((?:cmdextversion|errorlevel)%s)(%s)(\d+)' %
-             (_token_terminator, _space),
-             bygroups(Keyword, using(this, state='text'),
-                      Number.Integer), '#pop'),
-            (r'(defined%s)(%s)(%s)' % (_token_terminator, _space, _stoken),
-             bygroups(Keyword, using(this, state='text'),
-                      using(this, state='variable')), '#pop'),
-            (r'(exist%s)(%s%s)' % (_token_terminator, _space, _stoken),
-             bygroups(Keyword, using(this, state='text')), '#pop'),
-            (r'(%s%s)(%s)(%s%s)' % (_number, _space, _opword, _space, _number),
-             bygroups(using(this, state='arithmetic'), Operator.Word,
-                      using(this, state='arithmetic')), '#pop'),
-            (_stoken, using(this, state='text'), ('#pop', 'if2')),
-        ],
-        'if2': [
-            (r'(%s?)(==)(%s?%s)' % (_space, _space, _stoken),
-             bygroups(using(this, state='text'), Operator,
-                      using(this, state='text')), '#pop'),
-            (r'(%s)(%s)(%s%s)' % (_space, _opword, _space, _stoken),
-             bygroups(using(this, state='text'), Operator.Word,
-                      using(this, state='text')), '#pop')
-        ],
-        '(?': [
-            (_space, using(this, state='text')),
-            (r'\(', Punctuation, ('#pop', 'else?', 'root/compound')),
-            default('#pop')
-        ],
-        'else?': [
-            (_space, using(this, state='text')),
-            (r'else%s' % _token_terminator, Keyword, '#pop'),
-            default('#pop')
-        ]
-    }
-
-
-class MSDOSSessionLexer(ShellSessionBaseLexer):
-    """
-    Lexer for MS DOS shell sessions, i.e. command lines, including a
-    prompt, interspersed with output.
-
-    .. versionadded:: 2.1
-    """
-
-    name = 'MSDOS Session'
-    aliases = ['doscon']
-    filenames = []
-    mimetypes = []
-
-    _innerLexerCls = BatchLexer
-    _ps1rgx = re.compile(r'^([^>]*>)(.*\n?)')
-    _ps2 = 'More? '
-
-
-class TcshLexer(RegexLexer):
-    """
-    Lexer for tcsh scripts.
-
-    .. versionadded:: 0.10
-    """
-
-    name = 'Tcsh'
-    aliases = ['tcsh', 'csh']
-    filenames = ['*.tcsh', '*.csh']
-    mimetypes = ['application/x-csh']
-
-    tokens = {
-        'root': [
-            include('basic'),
-            (r'\$\(', Keyword, 'paren'),
-            (r'\$\{#?', Keyword, 'curly'),
-            (r'`', String.Backtick, 'backticks'),
-            include('data'),
-        ],
-        'basic': [
-            (r'\b(if|endif|else|while|then|foreach|case|default|'
-             r'break|continue|goto|breaksw|end|switch|endsw)\s*\b',
-             Keyword),
-            (r'\b(alias|alloc|bg|bindkey|builtins|bye|caller|cd|chdir|'
-             r'complete|dirs|echo|echotc|eval|exec|exit|fg|filetest|getxvers|'
-             r'glob|getspath|hashstat|history|hup|inlib|jobs|kill|'
-             r'limit|log|login|logout|ls-F|migrate|newgrp|nice|nohup|notify|'
-             r'onintr|popd|printenv|pushd|rehash|repeat|rootnode|popd|pushd|'
-             r'set|shift|sched|setenv|setpath|settc|setty|setxvers|shift|'
-             r'source|stop|suspend|source|suspend|telltc|time|'
-             r'umask|unalias|uncomplete|unhash|universe|unlimit|unset|unsetenv|'
-             r'ver|wait|warp|watchlog|where|which)\s*\b',
-             Name.Builtin),
-            (r'#.*', Comment),
-            (r'\\[\w\W]', String.Escape),
-            (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Text, Operator)),
-            (r'[\[\]{}()=]+', Operator),
-            (r'<<\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
-            (r';', Punctuation),
-        ],
-        'data': [
-            (r'(?s)"(\\\\|\\[0-7]+|\\.|[^"\\])*"', String.Double),
-            (r"(?s)'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
-            (r'\s+', Text),
-            (r'[^=\s\[\]{}()$"\'`\\;#]+', Text),
-            (r'\d+(?= |\Z)', Number),
-            (r'\$#?(\w+|.)', Name.Variable),
-        ],
-        'curly': [
-            (r'\}', Keyword, '#pop'),
-            (r':-', Keyword),
-            (r'\w+', Name.Variable),
-            (r'[^}:"\'`$]+', Punctuation),
-            (r':', Punctuation),
-            include('root'),
-        ],
-        'paren': [
-            (r'\)', Keyword, '#pop'),
-            include('root'),
-        ],
-        'backticks': [
-            (r'`', String.Backtick, '#pop'),
-            include('root'),
-        ],
-    }
-
-
-class TcshSessionLexer(ShellSessionBaseLexer):
-    """
-    Lexer for Tcsh sessions, i.e. command lines, including a
-    prompt, interspersed with output.
-
-    .. versionadded:: 2.1
-    """
-
-    name = 'Tcsh Session'
-    aliases = ['tcshcon']
-    filenames = []
-    mimetypes = []
-
-    _innerLexerCls = TcshLexer
-    _ps1rgx = re.compile(r'^([^>]+>)(.*\n?)')
-    _ps2 = '? '
-
-
-class PowerShellLexer(RegexLexer):
-    """
-    For Windows PowerShell code.
-
-    .. versionadded:: 1.5
-    """
-    name = 'PowerShell'
-    aliases = ['powershell', 'pwsh', 'posh', 'ps1', 'psm1']
-    filenames = ['*.ps1', '*.psm1']
-    mimetypes = ['text/x-powershell']
-
-    flags = re.DOTALL | re.IGNORECASE | re.MULTILINE
-
-    keywords = (
-        'while validateset validaterange validatepattern validatelength '
-        'validatecount until trap switch return ref process param parameter in '
-        'if global: local: function foreach for finally filter end elseif else '
-        'dynamicparam do default continue cmdletbinding break begin alias \\? '
-        '% #script #private #local #global mandatory parametersetname position '
-        'valuefrompipeline valuefrompipelinebypropertyname '
-        'valuefromremainingarguments helpmessage try catch throw').split()
-
-    operators = (
-        'and as band bnot bor bxor casesensitive ccontains ceq cge cgt cle '
-        'clike clt cmatch cne cnotcontains cnotlike cnotmatch contains '
-        'creplace eq exact f file ge gt icontains ieq ige igt ile ilike ilt '
-        'imatch ine inotcontains inotlike inotmatch ireplace is isnot le like '
-        'lt match ne not notcontains notlike notmatch or regex replace '
-        'wildcard').split()
-
-    verbs = (
-        'write where watch wait use update unregister unpublish unprotect '
-        'unlock uninstall undo unblock trace test tee take sync switch '
-        'suspend submit stop step start split sort skip show set send select '
-        'search scroll save revoke resume restore restart resolve resize '
-        'reset request repair rename remove register redo receive read push '
-        'publish protect pop ping out optimize open new move mount merge '
-        'measure lock limit join invoke install initialize import hide group '
-        'grant get format foreach find export expand exit enter enable edit '
-        'dismount disconnect disable deny debug cxnew copy convertto '
-        'convertfrom convert connect confirm compress complete compare close '
-        'clear checkpoint block backup assert approve aggregate add').split()
-
-    aliases_ = (
-        'ac asnp cat cd cfs chdir clc clear clhy cli clp cls clv cnsn '
-        'compare copy cp cpi cpp curl cvpa dbp del diff dir dnsn ebp echo epal '
-        'epcsv epsn erase etsn exsn fc fhx fl foreach ft fw gal gbp gc gci gcm '
-        'gcs gdr ghy gi gjb gl gm gmo gp gps gpv group gsn gsnp gsv gu gv gwmi '
-        'h history icm iex ihy ii ipal ipcsv ipmo ipsn irm ise iwmi iwr kill lp '
-        'ls man md measure mi mount move mp mv nal ndr ni nmo npssc nsn nv ogv '
-        'oh popd ps pushd pwd r rbp rcjb rcsn rd rdr ren ri rjb rm rmdir rmo '
-        'rni rnp rp rsn rsnp rujb rv rvpa rwmi sajb sal saps sasv sbp sc select '
-        'set shcm si sl sleep sls sort sp spjb spps spsv start sujb sv swmi tee '
-        'trcm type wget where wjb write').split()
-
-    commenthelp = (
-        'component description example externalhelp forwardhelpcategory '
-        'forwardhelptargetname functionality inputs link '
-        'notes outputs parameter remotehelprunspace role synopsis').split()
-
-    tokens = {
-        'root': [
-            # we need to count pairs of parentheses for correct highlight
-            # of '$(...)' blocks in strings
-            (r'\(', Punctuation, 'child'),
-            (r'\s+', Text),
-            (r'^(\s*#[#\s]*)(\.(?:%s))([^\n]*$)' % '|'.join(commenthelp),
-             bygroups(Comment, String.Doc, Comment)),
-            (r'#[^\n]*?$', Comment),
-            (r'(<|<)#', Comment.Multiline, 'multline'),
-            (r'@"\n', String.Heredoc, 'heredoc-double'),
-            (r"@'\n.*?\n'@", String.Heredoc),
-            # escaped syntax
-            (r'`[\'"$@-]', Punctuation),
-            (r'"', String.Double, 'string'),
-            (r"'([^']|'')*'", String.Single),
-            (r'(\$|@@|@)((global|script|private|env):)?\w+',
-             Name.Variable),
-            (r'(%s)\b' % '|'.join(keywords), Keyword),
-            (r'-(%s)\b' % '|'.join(operators), Operator),
-            (r'(%s)-[a-z_]\w*\b' % '|'.join(verbs), Name.Builtin),
-            (r'(%s)\s' % '|'.join(aliases_), Name.Builtin),
-            (r'\[[a-z_\[][\w. `,\[\]]*\]', Name.Constant),  # .net [type]s
-            (r'-[a-z_]\w*', Name),
-            (r'\w+', Name),
-            (r'[.,;:@{}\[\]$()=+*/\\&%!~?^`|<>-]', Punctuation),
-        ],
-        'child': [
-            (r'\)', Punctuation, '#pop'),
-            include('root'),
-        ],
-        'multline': [
-            (r'[^#&.]+', Comment.Multiline),
-            (r'#(>|>)', Comment.Multiline, '#pop'),
-            (r'\.(%s)' % '|'.join(commenthelp), String.Doc),
-            (r'[#&.]', Comment.Multiline),
-        ],
-        'string': [
-            (r"`[0abfnrtv'\"$`]", String.Escape),
-            (r'[^$`"]+', String.Double),
-            (r'\$\(', Punctuation, 'child'),
-            (r'""', String.Double),
-            (r'[`$]', String.Double),
-            (r'"', String.Double, '#pop'),
-        ],
-        'heredoc-double': [
-            (r'\n"@', String.Heredoc, '#pop'),
-            (r'\$\(', Punctuation, 'child'),
-            (r'[^@\n]+"]', String.Heredoc),
-            (r".", String.Heredoc),
-        ]
-    }
-
-
-class PowerShellSessionLexer(ShellSessionBaseLexer):
-    """
-    Lexer for PowerShell sessions, i.e. command lines, including a
-    prompt, interspersed with output.
-
-    .. versionadded:: 2.1
-    """
-
-    name = 'PowerShell Session'
-    aliases = ['pwsh-session', 'ps1con']
-    filenames = []
-    mimetypes = []
-
-    _innerLexerCls = PowerShellLexer
-    _bare_continuation = True
-    _ps1rgx = re.compile(r'^((?:\[[^]]+\]: )?PS[^>]*> ?)(.*\n?)')
-    _ps2 = '> '
-
-
-class FishShellLexer(RegexLexer):
-    """
-    Lexer for Fish shell scripts.
-
-    .. versionadded:: 2.1
-    """
-
-    name = 'Fish'
-    aliases = ['fish', 'fishshell']
-    filenames = ['*.fish', '*.load']
-    mimetypes = ['application/x-fish']
-
-    tokens = {
-        'root': [
-            include('basic'),
-            include('data'),
-            include('interp'),
-        ],
-        'interp': [
-            (r'\$\(\(', Keyword, 'math'),
-            (r'\(', Keyword, 'paren'),
-            (r'\$#?(\w+|.)', Name.Variable),
-        ],
-        'basic': [
-            (r'\b(begin|end|if|else|while|break|for|in|return|function|block|'
-             r'case|continue|switch|not|and|or|set|echo|exit|pwd|true|false|'
-             r'cd|count|test)(\s*)\b',
-             bygroups(Keyword, Text)),
-            (r'\b(alias|bg|bind|breakpoint|builtin|command|commandline|'
-             r'complete|contains|dirh|dirs|emit|eval|exec|fg|fish|fish_config|'
-             r'fish_indent|fish_pager|fish_prompt|fish_right_prompt|'
-             r'fish_update_completions|fishd|funced|funcsave|functions|help|'
-             r'history|isatty|jobs|math|mimedb|nextd|open|popd|prevd|psub|'
-             r'pushd|random|read|set_color|source|status|trap|type|ulimit|'
-             r'umask|vared|fc|getopts|hash|kill|printf|time|wait)\s*\b(?!\.)',
-             Name.Builtin),
-            (r'#.*\n', Comment),
-            (r'\\[\w\W]', String.Escape),
-            (r'(\b\w+)(\s*)(=)', bygroups(Name.Variable, Whitespace, Operator)),
-            (r'[\[\]()=]', Operator),
-            (r'<<-?\s*(\'?)\\?(\w+)[\w\W]+?\2', String),
-        ],
-        'data': [
-            (r'(?s)\$?"(\\\\|\\[0-7]+|\\.|[^"\\$])*"', String.Double),
-            (r'"', String.Double, 'string'),
-            (r"(?s)\$'(\\\\|\\[0-7]+|\\.|[^'\\])*'", String.Single),
-            (r"(?s)'.*?'", String.Single),
-            (r';', Punctuation),
-            (r'&|\||\^|<|>', Operator),
-            (r'\s+', Text),
-            (r'\d+(?= |\Z)', Number),
-            (r'[^=\s\[\]{}()$"\'`\\<&|;]+', Text),
-        ],
-        'string': [
-            (r'"', String.Double, '#pop'),
-            (r'(?s)(\\\\|\\[0-7]+|\\.|[^"\\$])+', String.Double),
-            include('interp'),
-        ],
-        'paren': [
-            (r'\)', Keyword, '#pop'),
-            include('root'),
-        ],
-        'math': [
-            (r'\)\)', Keyword, '#pop'),
-            (r'[-+*/%^|&]|\*\*|\|\|', Operator),
-            (r'\d+#\d+', Number),
-            (r'\d+#(?! )', Number),
-            (r'\d+', Number),
-            include('root'),
-        ],
-    }
-
-class ExeclineLexer(RegexLexer):
-    """
-    Lexer for Laurent Bercot's execline language
-    (https://skarnet.org/software/execline).
-
-    .. versionadded:: 2.7
-    """
-
-    name = 'execline'
-    aliases = ['execline']
-    filenames = ['*.exec']
-
-    tokens = {
-        'root': [
-            include('basic'),
-            include('data'),
-            include('interp')
-        ],
-        'interp': [
-            (r'\$\{', String.Interpol, 'curly'),
-            (r'\$[\w@#]+', Name.Variable),  # user variable
-            (r'\$', Text),
-        ],
-        'basic': [
-            (r'\b(background|backtick|cd|define|dollarat|elgetopt|'
-             r'elgetpositionals|elglob|emptyenv|envfile|exec|execlineb|'
-             r'exit|export|fdblock|fdclose|fdmove|fdreserve|fdswap|'
-             r'forbacktickx|foreground|forstdin|forx|getcwd|getpid|heredoc|'
-             r'homeof|if|ifelse|ifte|ifthenelse|importas|loopwhilex|'
-             r'multidefine|multisubstitute|pipeline|piperw|posix-cd|'
-             r'redirfd|runblock|shift|trap|tryexec|umask|unexport|wait|'
-             r'withstdinas)\b', Name.Builtin),
-            (r'\A#!.+\n', Comment.Hashbang),
-            (r'#.*\n', Comment.Single),
-            (r'[{}]', Operator)
-        ],
-        'data': [
-            (r'(?s)"(\\.|[^"\\$])*"', String.Double),
-            (r'"', String.Double, 'string'),
-            (r'\s+', Text),
-            (r'[^\s{}$"\\]+', Text)
-        ],
-        'string': [
-            (r'"', String.Double, '#pop'),
-            (r'(?s)(\\\\|\\.|[^"\\$])+', String.Double),
-            include('interp'),
-        ],
-        'curly': [
-            (r'\}', String.Interpol, '#pop'),
-            (r'[\w#@]+', Name.Variable),
-            include('root')
-        ]
-
-    }
-
-    def analyse_text(text):
-        if shebang_matches(text, r'execlineb'):
-            return 1
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/sieve.py b/venv/lib/python3.11/site-packages/pygments/lexers/sieve.py
deleted file mode 100644
index 8287b07..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/sieve.py
+++ /dev/null
@@ -1,78 +0,0 @@
-"""
-    pygments.lexers.sieve
-    ~~~~~~~~~~~~~~~~~~~~~
-
-    Lexer for Sieve file format.
-
-    https://tools.ietf.org/html/rfc5228
-    https://tools.ietf.org/html/rfc5173
-    https://tools.ietf.org/html/rfc5229
-    https://tools.ietf.org/html/rfc5230
-    https://tools.ietf.org/html/rfc5232
-    https://tools.ietf.org/html/rfc5235
-    https://tools.ietf.org/html/rfc5429
-    https://tools.ietf.org/html/rfc8580
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Comment, Name, Literal, String, Text, Punctuation, \
-    Keyword
-
-__all__ = ["SieveLexer"]
-
-
-class SieveLexer(RegexLexer):
-    """
-    Lexer for sieve format.
-
-    .. versionadded:: 2.6
-    """
-    name = 'Sieve'
-    filenames = ['*.siv', '*.sieve']
-    aliases = ['sieve']
-
-    tokens = {
-        'root': [
-            (r'\s+', Text),
-            (r'[();,{}\[\]]', Punctuation),
-            # import:
-            (r'(?i)require',
-             Keyword.Namespace),
-            # tags:
-            (r'(?i)(:)(addresses|all|contains|content|create|copy|comparator|'
-             r'count|days|detail|domain|fcc|flags|from|handle|importance|is|'
-             r'localpart|length|lowerfirst|lower|matches|message|mime|options|'
-             r'over|percent|quotewildcard|raw|regex|specialuse|subject|text|'
-             r'under|upperfirst|upper|value)',
-             bygroups(Name.Tag, Name.Tag)),
-            # tokens:
-            (r'(?i)(address|addflag|allof|anyof|body|discard|elsif|else|envelope|'
-             r'ereject|exists|false|fileinto|if|hasflag|header|keep|'
-             r'notify_method_capability|notify|not|redirect|reject|removeflag|'
-             r'setflag|size|spamtest|stop|string|true|vacation|virustest)',
-             Name.Builtin),
-            (r'(?i)set',
-             Keyword.Declaration),
-            # number:
-            (r'([0-9.]+)([kmgKMG])?',
-             bygroups(Literal.Number, Literal.Number)),
-            # comment:
-            (r'#.*$',
-             Comment.Single),
-            (r'/\*.*\*/',
-             Comment.Multiline),
-            # string:
-            (r'"[^"]*?"',
-             String),
-            # text block:
-            (r'text:',
-             Name.Tag, 'text'),
-        ],
-        'text': [
-            (r'[^.].*?\n', String),
-            (r'^\.', Punctuation, "#pop"),
-        ]
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/slash.py b/venv/lib/python3.11/site-packages/pygments/lexers/slash.py
deleted file mode 100644
index cce47ce..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/slash.py
+++ /dev/null
@@ -1,184 +0,0 @@
-"""
-    pygments.lexers.slash
-    ~~~~~~~~~~~~~~~~~~~~~
-
-    Lexer for the `Slash `_ programming
-    language.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import ExtendedRegexLexer, bygroups, DelegatingLexer
-from pygments.token import Name, Number, String, Comment, Punctuation, \
-    Other, Keyword, Operator, Whitespace
-
-__all__ = ['SlashLexer']
-
-
-class SlashLanguageLexer(ExtendedRegexLexer):
-    _nkw = r'(?=[^a-zA-Z_0-9])'
-
-    def move_state(new_state):
-        return ("#pop", new_state)
-
-    def right_angle_bracket(lexer, match, ctx):
-        if len(ctx.stack) > 1 and ctx.stack[-2] == "string":
-            ctx.stack.pop()
-        yield match.start(), String.Interpol, '}'
-        ctx.pos = match.end()
-        pass
-
-    tokens = {
-        "root": [
-            (r"<%=",        Comment.Preproc,    move_state("slash")),
-            (r"<%!!",       Comment.Preproc,    move_state("slash")),
-            (r"<%#.*?%>",   Comment.Multiline),
-            (r"<%",         Comment.Preproc,    move_state("slash")),
-            (r".|\n",       Other),
-        ],
-        "string": [
-            (r"\\",         String.Escape,      move_state("string_e")),
-            (r"\"",         String,             move_state("slash")),
-            (r"#\{",        String.Interpol,    "slash"),
-            (r'.|\n',       String),
-        ],
-        "string_e": [
-            (r'n',                  String.Escape,      move_state("string")),
-            (r't',                  String.Escape,      move_state("string")),
-            (r'r',                  String.Escape,      move_state("string")),
-            (r'e',                  String.Escape,      move_state("string")),
-            (r'x[a-fA-F0-9]{2}',    String.Escape,      move_state("string")),
-            (r'.',                  String.Escape,      move_state("string")),
-        ],
-        "regexp": [
-            (r'}[a-z]*',            String.Regex,       move_state("slash")),
-            (r'\\(.|\n)',           String.Regex),
-            (r'{',                  String.Regex,       "regexp_r"),
-            (r'.|\n',               String.Regex),
-        ],
-        "regexp_r": [
-            (r'}[a-z]*',            String.Regex,       "#pop"),
-            (r'\\(.|\n)',           String.Regex),
-            (r'{',                  String.Regex,       "regexp_r"),
-        ],
-        "slash": [
-            (r"%>",                     Comment.Preproc,    move_state("root")),
-            (r"\"",                     String,             move_state("string")),
-            (r"'[a-zA-Z0-9_]+",         String),
-            (r'%r{',                    String.Regex,       move_state("regexp")),
-            (r'/\*.*?\*/',              Comment.Multiline),
-            (r"(#|//).*?\n",            Comment.Single),
-            (r'-?[0-9]+e[+-]?[0-9]+',   Number.Float),
-            (r'-?[0-9]+\.[0-9]+(e[+-]?[0-9]+)?', Number.Float),
-            (r'-?[0-9]+',               Number.Integer),
-            (r'nil'+_nkw,               Name.Builtin),
-            (r'true'+_nkw,              Name.Builtin),
-            (r'false'+_nkw,             Name.Builtin),
-            (r'self'+_nkw,              Name.Builtin),
-            (r'(class)(\s+)([A-Z][a-zA-Z0-9_\']*)',
-                bygroups(Keyword, Whitespace, Name.Class)),
-            (r'class'+_nkw,             Keyword),
-            (r'extends'+_nkw,           Keyword),
-            (r'(def)(\s+)(self)(\s*)(\.)(\s*)([a-z_][a-zA-Z0-9_\']*=?|<<|>>|==|<=>|<=|<|>=|>|\+|-(self)?|~(self)?|\*|/|%|^|&&|&|\||\[\]=?)',
-                bygroups(Keyword, Whitespace, Name.Builtin, Whitespace, Punctuation, Whitespace, Name.Function)),
-            (r'(def)(\s+)([a-z_][a-zA-Z0-9_\']*=?|<<|>>|==|<=>|<=|<|>=|>|\+|-(self)?|~(self)?|\*|/|%|^|&&|&|\||\[\]=?)',
-                bygroups(Keyword, Whitespace, Name.Function)),
-            (r'def'+_nkw,               Keyword),
-            (r'if'+_nkw,                Keyword),
-            (r'elsif'+_nkw,             Keyword),
-            (r'else'+_nkw,              Keyword),
-            (r'unless'+_nkw,            Keyword),
-            (r'for'+_nkw,               Keyword),
-            (r'in'+_nkw,                Keyword),
-            (r'while'+_nkw,             Keyword),
-            (r'until'+_nkw,             Keyword),
-            (r'and'+_nkw,               Keyword),
-            (r'or'+_nkw,                Keyword),
-            (r'not'+_nkw,               Keyword),
-            (r'lambda'+_nkw,            Keyword),
-            (r'try'+_nkw,               Keyword),
-            (r'catch'+_nkw,             Keyword),
-            (r'return'+_nkw,            Keyword),
-            (r'next'+_nkw,              Keyword),
-            (r'last'+_nkw,              Keyword),
-            (r'throw'+_nkw,             Keyword),
-            (r'use'+_nkw,               Keyword),
-            (r'switch'+_nkw,            Keyword),
-            (r'\\',                     Keyword),
-            (r'λ',                      Keyword),
-            (r'__FILE__'+_nkw,          Name.Builtin.Pseudo),
-            (r'__LINE__'+_nkw,          Name.Builtin.Pseudo),
-            (r'[A-Z][a-zA-Z0-9_\']*'+_nkw, Name.Constant),
-            (r'[a-z_][a-zA-Z0-9_\']*'+_nkw, Name),
-            (r'@[a-z_][a-zA-Z0-9_\']*'+_nkw, Name.Variable.Instance),
-            (r'@@[a-z_][a-zA-Z0-9_\']*'+_nkw, Name.Variable.Class),
-            (r'\(',                     Punctuation),
-            (r'\)',                     Punctuation),
-            (r'\[',                     Punctuation),
-            (r'\]',                     Punctuation),
-            (r'\{',                     Punctuation),
-            (r'\}',                     right_angle_bracket),
-            (r';',                      Punctuation),
-            (r',',                      Punctuation),
-            (r'<<=',                    Operator),
-            (r'>>=',                    Operator),
-            (r'<<',                     Operator),
-            (r'>>',                     Operator),
-            (r'==',                     Operator),
-            (r'!=',                     Operator),
-            (r'=>',                     Operator),
-            (r'=',                      Operator),
-            (r'<=>',                    Operator),
-            (r'<=',                     Operator),
-            (r'>=',                     Operator),
-            (r'<',                      Operator),
-            (r'>',                      Operator),
-            (r'\+\+',                   Operator),
-            (r'\+=',                    Operator),
-            (r'-=',                     Operator),
-            (r'\*\*=',                  Operator),
-            (r'\*=',                    Operator),
-            (r'\*\*',                   Operator),
-            (r'\*',                     Operator),
-            (r'/=',                     Operator),
-            (r'\+',                     Operator),
-            (r'-',                      Operator),
-            (r'/',                      Operator),
-            (r'%=',                     Operator),
-            (r'%',                      Operator),
-            (r'^=',                     Operator),
-            (r'&&=',                    Operator),
-            (r'&=',                     Operator),
-            (r'&&',                     Operator),
-            (r'&',                      Operator),
-            (r'\|\|=',                  Operator),
-            (r'\|=',                    Operator),
-            (r'\|\|',                   Operator),
-            (r'\|',                     Operator),
-            (r'!',                      Operator),
-            (r'\.\.\.',                 Operator),
-            (r'\.\.',                   Operator),
-            (r'\.',                     Operator),
-            (r'::',                     Operator),
-            (r':',                      Operator),
-            (r'(\s|\n)+',               Whitespace),
-            (r'[a-z_][a-zA-Z0-9_\']*',  Name.Variable),
-        ],
-    }
-
-
-class SlashLexer(DelegatingLexer):
-    """
-    Lexer for the Slash programming language.
-
-    .. versionadded:: 2.4
-    """
-
-    name = 'Slash'
-    aliases = ['slash']
-    filenames = ['*.sla']
-
-    def __init__(self, **options):
-        from pygments.lexers.web import HtmlLexer
-        super().__init__(HtmlLexer, SlashLanguageLexer, **options)
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/smalltalk.py b/venv/lib/python3.11/site-packages/pygments/lexers/smalltalk.py
deleted file mode 100644
index 58d870e..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/smalltalk.py
+++ /dev/null
@@ -1,196 +0,0 @@
-"""
-    pygments.lexers.smalltalk
-    ~~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for Smalltalk and related languages.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, bygroups, default
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation
-
-__all__ = ['SmalltalkLexer', 'NewspeakLexer']
-
-
-class SmalltalkLexer(RegexLexer):
-    """
-    For Smalltalk syntax.
-    Contributed by Stefan Matthias Aust.
-    Rewritten by Nils Winter.
-
-    .. versionadded:: 0.10
-    """
-    name = 'Smalltalk'
-    url = 'http://www.smalltalk.org/'
-    filenames = ['*.st']
-    aliases = ['smalltalk', 'squeak', 'st']
-    mimetypes = ['text/x-smalltalk']
-
-    tokens = {
-        'root': [
-            (r'(<)(\w+:)(.*?)(>)', bygroups(Text, Keyword, Text, Text)),
-            include('squeak fileout'),
-            include('whitespaces'),
-            include('method definition'),
-            (r'(\|)([\w\s]*)(\|)', bygroups(Operator, Name.Variable, Operator)),
-            include('objects'),
-            (r'\^|\:=|\_', Operator),
-            # temporaries
-            (r'[\]({}.;!]', Text),
-        ],
-        'method definition': [
-            # Not perfect can't allow whitespaces at the beginning and the
-            # without breaking everything
-            (r'([a-zA-Z]+\w*:)(\s*)(\w+)',
-             bygroups(Name.Function, Text, Name.Variable)),
-            (r'^(\b[a-zA-Z]+\w*\b)(\s*)$', bygroups(Name.Function, Text)),
-            (r'^([-+*/\\~<>=|&!?,@%]+)(\s*)(\w+)(\s*)$',
-             bygroups(Name.Function, Text, Name.Variable, Text)),
-        ],
-        'blockvariables': [
-            include('whitespaces'),
-            (r'(:)(\s*)(\w+)',
-             bygroups(Operator, Text, Name.Variable)),
-            (r'\|', Operator, '#pop'),
-            default('#pop'),  # else pop
-        ],
-        'literals': [
-            (r"'(''|[^'])*'", String, 'afterobject'),
-            (r'\$.', String.Char, 'afterobject'),
-            (r'#\(', String.Symbol, 'parenth'),
-            (r'\)', Text, 'afterobject'),
-            (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number, 'afterobject'),
-        ],
-        '_parenth_helper': [
-            include('whitespaces'),
-            (r'(\d+r)?-?\d+(\.\d+)?(e-?\d+)?', Number),
-            (r'[-+*/\\~<>=|&#!?,@%\w:]+', String.Symbol),
-            # literals
-            (r"'(''|[^'])*'", String),
-            (r'\$.', String.Char),
-            (r'#*\(', String.Symbol, 'inner_parenth'),
-        ],
-        'parenth': [
-            # This state is a bit tricky since
-            # we can't just pop this state
-            (r'\)', String.Symbol, ('root', 'afterobject')),
-            include('_parenth_helper'),
-        ],
-        'inner_parenth': [
-            (r'\)', String.Symbol, '#pop'),
-            include('_parenth_helper'),
-        ],
-        'whitespaces': [
-            # skip whitespace and comments
-            (r'\s+', Text),
-            (r'"(""|[^"])*"', Comment),
-        ],
-        'objects': [
-            (r'\[', Text, 'blockvariables'),
-            (r'\]', Text, 'afterobject'),
-            (r'\b(self|super|true|false|nil|thisContext)\b',
-             Name.Builtin.Pseudo, 'afterobject'),
-            (r'\b[A-Z]\w*(?!:)\b', Name.Class, 'afterobject'),
-            (r'\b[a-z]\w*(?!:)\b', Name.Variable, 'afterobject'),
-            (r'#("(""|[^"])*"|[-+*/\\~<>=|&!?,@%]+|[\w:]+)',
-             String.Symbol, 'afterobject'),
-            include('literals'),
-        ],
-        'afterobject': [
-            (r'! !$', Keyword, '#pop'),  # squeak chunk delimiter
-            include('whitespaces'),
-            (r'\b(ifTrue:|ifFalse:|whileTrue:|whileFalse:|timesRepeat:)',
-             Name.Builtin, '#pop'),
-            (r'\b(new\b(?!:))', Name.Builtin),
-            (r'\:=|\_', Operator, '#pop'),
-            (r'\b[a-zA-Z]+\w*:', Name.Function, '#pop'),
-            (r'\b[a-zA-Z]+\w*', Name.Function),
-            (r'\w+:?|[-+*/\\~<>=|&!?,@%]+', Name.Function, '#pop'),
-            (r'\.', Punctuation, '#pop'),
-            (r';', Punctuation),
-            (r'[\])}]', Text),
-            (r'[\[({]', Text, '#pop'),
-        ],
-        'squeak fileout': [
-            # Squeak fileout format (optional)
-            (r'^"(""|[^"])*"!', Keyword),
-            (r"^'(''|[^'])*'!", Keyword),
-            (r'^(!)(\w+)( commentStamp: )(.*?)( prior: .*?!\n)(.*?)(!)',
-                bygroups(Keyword, Name.Class, Keyword, String, Keyword, Text, Keyword)),
-            (r"^(!)(\w+(?: class)?)( methodsFor: )('(?:''|[^'])*')(.*?!)",
-                bygroups(Keyword, Name.Class, Keyword, String, Keyword)),
-            (r'^(\w+)( subclass: )(#\w+)'
-             r'(\s+instanceVariableNames: )(.*?)'
-             r'(\s+classVariableNames: )(.*?)'
-             r'(\s+poolDictionaries: )(.*?)'
-             r'(\s+category: )(.*?)(!)',
-                bygroups(Name.Class, Keyword, String.Symbol, Keyword, String, Keyword,
-                         String, Keyword, String, Keyword, String, Keyword)),
-            (r'^(\w+(?: class)?)(\s+instanceVariableNames: )(.*?)(!)',
-                bygroups(Name.Class, Keyword, String, Keyword)),
-            (r'(!\n)(\].*)(! !)$', bygroups(Keyword, Text, Keyword)),
-            (r'! !$', Keyword),
-        ],
-    }
-
-
-class NewspeakLexer(RegexLexer):
-    """
-    For Newspeak syntax.
-
-    .. versionadded:: 1.1
-    """
-    name = 'Newspeak'
-    url = 'http://newspeaklanguage.org/'
-    filenames = ['*.ns2']
-    aliases = ['newspeak', ]
-    mimetypes = ['text/x-newspeak']
-
-    tokens = {
-        'root': [
-            (r'\b(Newsqueak2)\b', Keyword.Declaration),
-            (r"'[^']*'", String),
-            (r'\b(class)(\s+)(\w+)(\s*)',
-             bygroups(Keyword.Declaration, Text, Name.Class, Text)),
-            (r'\b(mixin|self|super|private|public|protected|nil|true|false)\b',
-             Keyword),
-            (r'(\w+\:)(\s*)([a-zA-Z_]\w+)',
-             bygroups(Name.Function, Text, Name.Variable)),
-            (r'(\w+)(\s*)(=)',
-             bygroups(Name.Attribute, Text, Operator)),
-            (r'<\w+>', Comment.Special),
-            include('expressionstat'),
-            include('whitespace')
-        ],
-
-        'expressionstat': [
-            (r'(\d+\.\d*|\.\d+|\d+[fF])[fF]?', Number.Float),
-            (r'\d+', Number.Integer),
-            (r':\w+', Name.Variable),
-            (r'(\w+)(::)', bygroups(Name.Variable, Operator)),
-            (r'\w+:', Name.Function),
-            (r'\w+', Name.Variable),
-            (r'\(|\)', Punctuation),
-            (r'\[|\]', Punctuation),
-            (r'\{|\}', Punctuation),
-
-            (r'(\^|\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-|:)', Operator),
-            (r'\.|;', Punctuation),
-            include('whitespace'),
-            include('literals'),
-        ],
-        'literals': [
-            (r'\$.', String),
-            (r"'[^']*'", String),
-            (r"#'[^']*'", String.Symbol),
-            (r"#\w+:?", String.Symbol),
-            (r"#(\+|\/|~|\*|<|>|=|@|%|\||&|\?|!|,|-)+", String.Symbol)
-        ],
-        'whitespace': [
-            (r'\s+', Text),
-            (r'"[^"]*"', Comment)
-        ],
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/smithy.py b/venv/lib/python3.11/site-packages/pygments/lexers/smithy.py
deleted file mode 100644
index 3f48bfa..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/smithy.py
+++ /dev/null
@@ -1,78 +0,0 @@
-"""
-    pygments.lexers.smithy
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for the Smithy IDL.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Text, Comment, Keyword, Name, String, \
-    Number, Whitespace, Punctuation
-
-__all__ = ['SmithyLexer']
-
-
-class SmithyLexer(RegexLexer):
-    """
-    For Smithy IDL
-
-    .. versionadded:: 2.10
-    """
-    name = 'Smithy'
-    url = 'https://awslabs.github.io/smithy/'
-    filenames = ['*.smithy']
-    aliases = ['smithy']
-
-    unquoted = r'[A-Za-z0-9_\.#$-]+'
-    identifier = r"[A-Za-z0-9_\.#$-]+"
-
-    simple_shapes = (
-        'use', 'byte', 'short', 'integer', 'long', 'float', 'document',
-        'double', 'bigInteger', 'bigDecimal', 'boolean', 'blob', 'string',
-        'timestamp',
-    )
-
-    aggregate_shapes = (
-       'apply', 'list', 'map', 'set', 'structure', 'union', 'resource',
-       'operation', 'service', 'trait'
-    )
-
-    tokens = {
-        'root': [
-            (r'///.*$', Comment.Multiline),
-            (r'//.*$', Comment),
-            (r'@[0-9a-zA-Z\.#-]*', Name.Decorator),
-            (r'(=)', Name.Decorator),
-            (r'^(\$version)(:)(.+)',
-                bygroups(Keyword.Declaration, Name.Decorator, Name.Class)),
-            (r'^(namespace)(\s+' + identifier + r')\b',
-                bygroups(Keyword.Declaration, Name.Class)),
-            (words(simple_shapes,
-                   prefix=r'^', suffix=r'(\s+' + identifier + r')\b'),
-                bygroups(Keyword.Declaration, Name.Class)),
-            (words(aggregate_shapes,
-                   prefix=r'^', suffix=r'(\s+' + identifier + r')'),
-                bygroups(Keyword.Declaration, Name.Class)),
-            (r'^(metadata)(\s+)((?:\S+)|(?:\"[^"]+\"))(\s*)(=)',
-                bygroups(Keyword.Declaration, Whitespace, Name.Class,
-                         Whitespace, Name.Decorator)),
-            (r"(true|false|null)", Keyword.Constant),
-            (r"(-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?)", Number),
-            (identifier + ":", Name.Label),
-            (identifier, Name.Variable.Class),
-            (r'\[', Text, "#push"),
-            (r'\]', Text, "#pop"),
-            (r'\(', Text, "#push"),
-            (r'\)', Text, "#pop"),
-            (r'\{', Text, "#push"),
-            (r'\}', Text, "#pop"),
-            (r'"{3}(\\\\|\n|\\")*"{3}', String.Doc),
-            (r'"(\\\\|\n|\\"|[^"])*"', String.Double),
-            (r"'(\\\\|\n|\\'|[^'])*'", String.Single),
-            (r'[:,]+', Punctuation),
-            (r'\s+', Whitespace),
-        ]
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/smv.py b/venv/lib/python3.11/site-packages/pygments/lexers/smv.py
deleted file mode 100644
index 2584086..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/smv.py
+++ /dev/null
@@ -1,78 +0,0 @@
-"""
-    pygments.lexers.smv
-    ~~~~~~~~~~~~~~~~~~~
-
-    Lexers for the SMV languages.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, words
-from pygments.token import Comment, Keyword, Name, Number, Operator, \
-    Punctuation, Text
-
-__all__ = ['NuSMVLexer']
-
-
-class NuSMVLexer(RegexLexer):
-    """
-    Lexer for the NuSMV language.
-
-    .. versionadded:: 2.2
-    """
-
-    name = 'NuSMV'
-    aliases = ['nusmv']
-    filenames = ['*.smv']
-    mimetypes = []
-
-    tokens = {
-        'root': [
-            # Comments
-            (r'(?s)\/\-\-.*?\-\-/', Comment),
-            (r'--.*\n', Comment),
-
-            # Reserved
-            (words(('MODULE', 'DEFINE', 'MDEFINE', 'CONSTANTS', 'VAR', 'IVAR',
-                    'FROZENVAR', 'INIT', 'TRANS', 'INVAR', 'SPEC', 'CTLSPEC',
-                    'LTLSPEC', 'PSLSPEC', 'COMPUTE', 'NAME', 'INVARSPEC',
-                    'FAIRNESS', 'JUSTICE', 'COMPASSION', 'ISA', 'ASSIGN',
-                    'CONSTRAINT', 'SIMPWFF', 'CTLWFF', 'LTLWFF', 'PSLWFF',
-                    'COMPWFF', 'IN', 'MIN', 'MAX', 'MIRROR', 'PRED',
-                    'PREDICATES'), suffix=r'(?![\w$#-])'),
-             Keyword.Declaration),
-            (r'process(?![\w$#-])', Keyword),
-            (words(('array', 'of', 'boolean', 'integer', 'real', 'word'),
-                   suffix=r'(?![\w$#-])'), Keyword.Type),
-            (words(('case', 'esac'), suffix=r'(?![\w$#-])'), Keyword),
-            (words(('word1', 'bool', 'signed', 'unsigned', 'extend', 'resize',
-                    'sizeof', 'uwconst', 'swconst', 'init', 'self', 'count',
-                    'abs', 'max', 'min'), suffix=r'(?![\w$#-])'),
-             Name.Builtin),
-            (words(('EX', 'AX', 'EF', 'AF', 'EG', 'AG', 'E', 'F', 'O', 'G',
-                    'H', 'X', 'Y', 'Z', 'A', 'U', 'S', 'V', 'T', 'BU', 'EBF',
-                    'ABF', 'EBG', 'ABG', 'next', 'mod', 'union', 'in', 'xor',
-                    'xnor'), suffix=r'(?![\w$#-])'),
-                Operator.Word),
-            (words(('TRUE', 'FALSE'), suffix=r'(?![\w$#-])'), Keyword.Constant),
-
-            # Names
-            (r'[a-zA-Z_][\w$#-]*', Name.Variable),
-
-            # Operators
-            (r':=', Operator),
-            (r'[-&|+*/<>!=]', Operator),
-
-            # Literals
-            (r'\-?\d+\b', Number.Integer),
-            (r'0[su][bB]\d*_[01_]+', Number.Bin),
-            (r'0[su][oO]\d*_[0-7_]+', Number.Oct),
-            (r'0[su][dD]\d*_[\d_]+', Number.Decimal),
-            (r'0[su][hH]\d*_[\da-fA-F_]+', Number.Hex),
-
-            # Whitespace, punctuation and the rest
-            (r'\s+', Text.Whitespace),
-            (r'[()\[\]{};?:.,]', Punctuation),
-        ],
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/snobol.py b/venv/lib/python3.11/site-packages/pygments/lexers/snobol.py
deleted file mode 100644
index 28087de..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/snobol.py
+++ /dev/null
@@ -1,82 +0,0 @@
-"""
-    pygments.lexers.snobol
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for the SNOBOL language.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation
-
-__all__ = ['SnobolLexer']
-
-
-class SnobolLexer(RegexLexer):
-    """
-    Lexer for the SNOBOL4 programming language.
-
-    Recognizes the common ASCII equivalents of the original SNOBOL4 operators.
-    Does not require spaces around binary operators.
-
-    .. versionadded:: 1.5
-    """
-
-    name = "Snobol"
-    aliases = ["snobol"]
-    filenames = ['*.snobol']
-    mimetypes = ['text/x-snobol']
-
-    tokens = {
-        # root state, start of line
-        # comments, continuation lines, and directives start in column 1
-        # as do labels
-        'root': [
-            (r'\*.*\n', Comment),
-            (r'[+.] ', Punctuation, 'statement'),
-            (r'-.*\n', Comment),
-            (r'END\s*\n', Name.Label, 'heredoc'),
-            (r'[A-Za-z$][\w$]*', Name.Label, 'statement'),
-            (r'\s+', Text, 'statement'),
-        ],
-        # statement state, line after continuation or label
-        'statement': [
-            (r'\s*\n', Text, '#pop'),
-            (r'\s+', Text),
-            (r'(?<=[^\w.])(LT|LE|EQ|NE|GE|GT|INTEGER|IDENT|DIFFER|LGT|SIZE|'
-             r'REPLACE|TRIM|DUPL|REMDR|DATE|TIME|EVAL|APPLY|OPSYN|LOAD|UNLOAD|'
-             r'LEN|SPAN|BREAK|ANY|NOTANY|TAB|RTAB|REM|POS|RPOS|FAIL|FENCE|'
-             r'ABORT|ARB|ARBNO|BAL|SUCCEED|INPUT|OUTPUT|TERMINAL)(?=[^\w.])',
-             Name.Builtin),
-            (r'[A-Za-z][\w.]*', Name),
-            # ASCII equivalents of original operators
-            # | for the EBCDIC equivalent, ! likewise
-            # \ for EBCDIC negation
-            (r'\*\*|[?$.!%*/#+\-@|&\\=]', Operator),
-            (r'"[^"]*"', String),
-            (r"'[^']*'", String),
-            # Accept SPITBOL syntax for real numbers
-            # as well as Macro SNOBOL4
-            (r'[0-9]+(?=[^.EeDd])', Number.Integer),
-            (r'[0-9]+(\.[0-9]*)?([EDed][-+]?[0-9]+)?', Number.Float),
-            # Goto
-            (r':', Punctuation, 'goto'),
-            (r'[()<>,;]', Punctuation),
-        ],
-        # Goto block
-        'goto': [
-            (r'\s*\n', Text, "#pop:2"),
-            (r'\s+', Text),
-            (r'F|S', Keyword),
-            (r'(\()([A-Za-z][\w.]*)(\))',
-             bygroups(Punctuation, Name.Label, Punctuation))
-        ],
-        # everything after the END statement is basically one
-        # big heredoc.
-        'heredoc': [
-            (r'.*\n', String.Heredoc)
-        ]
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/solidity.py b/venv/lib/python3.11/site-packages/pygments/lexers/solidity.py
deleted file mode 100644
index f1654e4..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/solidity.py
+++ /dev/null
@@ -1,87 +0,0 @@
-"""
-    pygments.lexers.solidity
-    ~~~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for Solidity.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, include, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation, Whitespace
-
-__all__ = ['SolidityLexer']
-
-
-class SolidityLexer(RegexLexer):
-    """
-    For Solidity source code.
-
-    .. versionadded:: 2.5
-    """
-
-    name = 'Solidity'
-    aliases = ['solidity']
-    filenames = ['*.sol']
-    mimetypes = []
-
-    datatype = (
-        r'\b(address|bool|(?:(?:bytes|hash|int|string|uint)(?:8|16|24|32|40|48|56|64'
-        r'|72|80|88|96|104|112|120|128|136|144|152|160|168|176|184|192|200|208'
-        r'|216|224|232|240|248|256)?))\b'
-    )
-
-    tokens = {
-        'root': [
-            include('whitespace'),
-            include('comments'),
-            (r'\bpragma\s+solidity\b', Keyword, 'pragma'),
-            (r'\b(contract)(\s+)([a-zA-Z_]\w*)',
-             bygroups(Keyword, Whitespace, Name.Entity)),
-            (datatype + r'(\s+)((?:external|public|internal|private)\s+)?' +
-             r'([a-zA-Z_]\w*)',
-             bygroups(Keyword.Type, Whitespace, Keyword, Name.Variable)),
-            (r'\b(enum|event|function|struct)(\s+)([a-zA-Z_]\w*)',
-             bygroups(Keyword.Type, Whitespace, Name.Variable)),
-            (r'\b(msg|block|tx)\.([A-Za-z_][a-zA-Z0-9_]*)\b', Keyword),
-            (words((
-                'block', 'break', 'constant', 'constructor', 'continue',
-                'contract', 'do', 'else', 'external', 'false', 'for',
-                'function', 'if', 'import', 'inherited', 'internal', 'is',
-                'library', 'mapping', 'memory', 'modifier', 'msg', 'new',
-                'payable', 'private', 'public', 'require', 'return',
-                'returns', 'struct', 'suicide', 'throw', 'this', 'true',
-                'tx', 'var', 'while'), prefix=r'\b', suffix=r'\b'),
-             Keyword.Type),
-            (words(('keccak256',), prefix=r'\b', suffix=r'\b'), Name.Builtin),
-            (datatype, Keyword.Type),
-            include('constants'),
-            (r'[a-zA-Z_]\w*', Text),
-            (r'[~!%^&*+=|?:<>/-]', Operator),
-            (r'[.;{}(),\[\]]', Punctuation)
-        ],
-        'comments': [
-            (r'//(\n|[\w\W]*?[^\\]\n)', Comment.Single),
-            (r'/(\\\n)?[*][\w\W]*?[*](\\\n)?/', Comment.Multiline),
-            (r'/(\\\n)?[*][\w\W]*', Comment.Multiline)
-        ],
-        'constants': [
-            (r'("(\\"|.)*?")', String.Double),
-            (r"('(\\'|.)*?')", String.Single),
-            (r'\b0[xX][0-9a-fA-F]+\b', Number.Hex),
-            (r'\b\d+\b', Number.Decimal),
-        ],
-        'pragma': [
-            include('whitespace'),
-            include('comments'),
-            (r'(\^|>=|<)(\s*)(\d+\.\d+\.\d+)',
-             bygroups(Operator, Whitespace, Keyword)),
-            (r';', Punctuation, '#pop')
-        ],
-        'whitespace': [
-            (r'\s+', Whitespace),
-            (r'\n', Whitespace)
-        ]
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/sophia.py b/venv/lib/python3.11/site-packages/pygments/lexers/sophia.py
deleted file mode 100644
index fc4928c..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/sophia.py
+++ /dev/null
@@ -1,103 +0,0 @@
-"""
-    pygments.lexers.sophia
-    ~~~~~~~~~~~~~~~~~~~~~~
-
-    Lexer for Sophia.
-
-    Derived from pygments/lexers/reason.py.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, include, default, words
-from pygments.token import Comment, Keyword, Name, Number, Operator, \
-    Punctuation, String, Text
-
-__all__ = ['SophiaLexer']
-
-class SophiaLexer(RegexLexer):
-    """
-    A Sophia lexer.
-
-    .. versionadded:: 2.11
-    """
-
-    name = 'Sophia'
-    aliases = ['sophia']
-    filenames = ['*.aes']
-    mimetypes = []
-
-    keywords = (
-        'contract', 'include', 'let', 'switch', 'type', 'record', 'datatype',
-        'if', 'elif', 'else', 'function', 'stateful', 'payable', 'public',
-        'entrypoint', 'private', 'indexed', 'namespace', 'interface', 'main',
-        'using', 'as', 'for', 'hiding',
-    )
-
-    builtins = ('state', 'put', 'abort', 'require')
-
-    word_operators = ('mod', 'band', 'bor', 'bxor', 'bnot')
-
-    primitive_types = ('int', 'address', 'bool', 'bits', 'bytes', 'string',
-                       'list', 'option', 'char', 'unit', 'map', 'event',
-                       'hash', 'signature', 'oracle', 'oracle_query')
-
-    tokens = {
-        'escape-sequence': [
-            (r'\\[\\"\'ntbr]', String.Escape),
-            (r'\\[0-9]{3}', String.Escape),
-            (r'\\x[0-9a-fA-F]{2}', String.Escape),
-        ],
-        'root': [
-            (r'\s+', Text.Whitespace),
-            (r'(true|false)\b', Keyword.Constant),
-            (r'\b([A-Z][\w\']*)(?=\s*\.)', Name.Class, 'dotted'),
-            (r'\b([A-Z][\w\']*)', Name.Function),
-            (r'//.*?\n', Comment.Single),
-            (r'\/\*(?!/)', Comment.Multiline, 'comment'),
-
-            (r'0[xX][\da-fA-F][\da-fA-F_]*', Number.Hex),
-            (r'#[\da-fA-F][\da-fA-F_]*', Name.Label),
-            (r'\d[\d_]*', Number.Integer),
-
-            (words(keywords, suffix=r'\b'), Keyword),
-            (words(builtins, suffix=r'\b'), Name.Builtin),
-            (words(word_operators, prefix=r'\b', suffix=r'\b'), Operator.Word),
-            (words(primitive_types, prefix=r'\b', suffix=r'\b'), Keyword.Type),
-
-            (r'[=!<>+\\*/:&|?~@^-]', Operator.Word),
-            (r'[.;:{}(),\[\]]', Punctuation),
-
-            (r"(ak_|ok_|oq_|ct_)[\w']*", Name.Label),
-            (r"[^\W\d][\w']*", Name),
-
-            (r"'(?:(\\[\\\"'ntbr ])|(\\[0-9]{3})|(\\x[0-9a-fA-F]{2}))'",
-             String.Char),
-            (r"'.'", String.Char),
-            (r"'[a-z][\w]*", Name.Variable),
-
-            (r'"', String.Double, 'string')
-        ],
-        'comment': [
-            (r'[^/*]+', Comment.Multiline),
-            (r'\/\*', Comment.Multiline, '#push'),
-            (r'\*\/', Comment.Multiline, '#pop'),
-            (r'\*', Comment.Multiline),
-        ],
-        'string': [
-            (r'[^\\"]+', String.Double),
-            include('escape-sequence'),
-            (r'\\\n', String.Double),
-            (r'"', String.Double, '#pop'),
-        ],
-        'dotted': [
-            (r'\s+', Text),
-            (r'\.', Punctuation),
-            (r'[A-Z][\w\']*(?=\s*\.)', Name.Function),
-            (r'[A-Z][\w\']*', Name.Function, '#pop'),
-            (r'[a-z_][\w\']*', Name, '#pop'),
-            default('#pop'),
-        ],
-    }
-
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/special.py b/venv/lib/python3.11/site-packages/pygments/lexers/special.py
deleted file mode 100644
index 45565ac..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/special.py
+++ /dev/null
@@ -1,116 +0,0 @@
-"""
-    pygments.lexers.special
-    ~~~~~~~~~~~~~~~~~~~~~~~
-
-    Special lexers.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-import ast
-
-from pygments.lexer import Lexer, line_re
-from pygments.token import Token, Error, Text, Generic
-from pygments.util import get_choice_opt
-
-
-__all__ = ['TextLexer', 'OutputLexer', 'RawTokenLexer']
-
-
-class TextLexer(Lexer):
-    """
-    "Null" lexer, doesn't highlight anything.
-    """
-    name = 'Text only'
-    aliases = ['text']
-    filenames = ['*.txt']
-    mimetypes = ['text/plain']
-    priority = 0.01
-
-    def get_tokens_unprocessed(self, text):
-        yield 0, Text, text
-
-    def analyse_text(text):
-        return TextLexer.priority
-
-
-class OutputLexer(Lexer):
-    """
-    Simple lexer that highlights everything as ``Token.Generic.Output``.
-
-    .. versionadded:: 2.10
-    """
-    name = 'Text output'
-    aliases = ['output']
-
-    def get_tokens_unprocessed(self, text):
-        yield 0, Generic.Output, text
-
-
-_ttype_cache = {}
-
-
-class RawTokenLexer(Lexer):
-    """
-    Recreate a token stream formatted with the `RawTokenFormatter`.
-
-    Additional options accepted:
-
-    `compress`
-        If set to ``"gz"`` or ``"bz2"``, decompress the token stream with
-        the given compression algorithm before lexing (default: ``""``).
-    """
-    name = 'Raw token data'
-    aliases = []
-    filenames = []
-    mimetypes = ['application/x-pygments-tokens']
-
-    def __init__(self, **options):
-        self.compress = get_choice_opt(options, 'compress',
-                                       ['', 'none', 'gz', 'bz2'], '')
-        Lexer.__init__(self, **options)
-
-    def get_tokens(self, text):
-        if self.compress:
-            if isinstance(text, str):
-                text = text.encode('latin1')
-            try:
-                if self.compress == 'gz':
-                    import gzip
-                    text = gzip.decompress(text)
-                elif self.compress == 'bz2':
-                    import bz2
-                    text = bz2.decompress(text)
-            except OSError:
-                yield Error, text.decode('latin1')
-        if isinstance(text, bytes):
-            text = text.decode('latin1')
-
-        # do not call Lexer.get_tokens() because stripping is not optional.
-        text = text.strip('\n') + '\n'
-        for i, t, v in self.get_tokens_unprocessed(text):
-            yield t, v
-
-    def get_tokens_unprocessed(self, text):
-        length = 0
-        for match in line_re.finditer(text):
-            try:
-                ttypestr, val = match.group().rstrip().split('\t', 1)
-                ttype = _ttype_cache.get(ttypestr)
-                if not ttype:
-                    ttype = Token
-                    ttypes = ttypestr.split('.')[1:]
-                    for ttype_ in ttypes:
-                        if not ttype_ or not ttype_[0].isupper():
-                            raise ValueError('malformed token name')
-                        ttype = getattr(ttype, ttype_)
-                    _ttype_cache[ttypestr] = ttype
-                val = ast.literal_eval(val)
-                if not isinstance(val, str):
-                    raise ValueError('expected str')
-            except (SyntaxError, ValueError):
-                val = match.group()
-                ttype = Error
-            yield length, ttype, val
-            length += len(val)
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/spice.py b/venv/lib/python3.11/site-packages/pygments/lexers/spice.py
deleted file mode 100644
index 5c2d8f2..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/spice.py
+++ /dev/null
@@ -1,70 +0,0 @@
-"""
-    pygments.lexers.spice
-    ~~~~~~~~~~~~~~~~~~~~~
-
-    Lexers for the Spice programming language.
-
-    :copyright: Copyright 2006-2023 by the Pygments team, see AUTHORS.
-    :license: BSD, see LICENSE for details.
-"""
-
-from pygments.lexer import RegexLexer, bygroups, words
-from pygments.token import Text, Comment, Operator, Keyword, Name, String, \
-    Number, Punctuation, Whitespace
-
-__all__ = ['SpiceLexer']
-
-
-class SpiceLexer(RegexLexer):
-    """
-    For Spice source.
-
-    .. versionadded:: 2.11
-    """
-    name = 'Spice'
-    url = 'https://www.spicelang.com'
-    filenames = ['*.spice']
-    aliases = ['spice', 'spicelang']
-    mimetypes = ['text/x-spice']
-
-    tokens = {
-        'root': [
-            (r'\n', Whitespace),
-            (r'\s+', Whitespace),
-            (r'\\\n', Text),
-            # comments
-            (r'//(.*?)\n', Comment.Single),
-            (r'/(\\\n)?[*]{2}(.|\n)*?[*](\\\n)?/', String.Doc),
-            (r'/(\\\n)?[*](.|\n)*?[*](\\\n)?/', Comment.Multiline),
-            # keywords
-            (r'(import|as)\b', Keyword.Namespace),
-            (r'(f|p|type|struct|interface|enum|alias|operator)\b', Keyword.Declaration),
-            (words(('if', 'else', 'for', 'foreach', 'do', 'while', 'break',
-                    'continue', 'return', 'assert', 'unsafe', 'ext'), suffix=r'\b'), Keyword),
-            (words(('const', 'signed', 'unsigned', 'inline', 'public', 'heap'),
-                   suffix=r'\b'), Keyword.Pseudo),
-            (words(('new', 'switch', 'case', 'yield', 'stash', 'pick', 'sync',
-                    'class'), suffix=r'\b'), Keyword.Reserved),
-            (r'(true|false|nil)\b', Keyword.Constant),
-            (words(('double', 'int', 'short', 'long', 'byte', 'char', 'string',
-                    'bool', 'dyn'), suffix=r'\b'), Keyword.Type),
-            (words(('printf', 'sizeof', 'alignof', 'len', 'panic'), suffix=r'\b(\()'),
-             bygroups(Name.Builtin, Punctuation)),
-            # numeric literals
-            (r'[-]?[0-9]*[.][0-9]+([eE][+-]?[0-9]+)?', Number.Double),
-            (r'0[bB][01]+[slu]?', Number.Bin),
-            (r'0[oO][0-7]+[slu]?', Number.Oct),
-            (r'0[xXhH][0-9a-fA-F]+[slu]?', Number.Hex),
-            (r'(0[dD])?[0-9]+[slu]?', Number.Integer),
-            # string literal
-            (r'"(\\\\|\\[^\\]|[^"\\])*"', String),
-            # char literal
-            (r'\'(\\\\|\\[^\\]|[^\'\\])\'', String.Char),
-            # tokens
-            (r'<<=|>>=|<<|>>|<=|>=|\+=|-=|\*=|/=|\%=|\|=|&=|\^=|&&|\|\||&|\||'
-             r'\+\+|--|\%|\^|\~|==|!=|->|::|[.]{3}|#!|#|[+\-*/&]', Operator),
-            (r'[|<>=!()\[\]{}.,;:\?]', Punctuation),
-            # identifiers
-            (r'[^\W\d]\w*', Name.Other),
-        ]
-    }
diff --git a/venv/lib/python3.11/site-packages/pygments/lexers/sql.py b/venv/lib/python3.11/site-packages/pygments/lexers/sql.py
deleted file mode 100644
index 2880841..0000000
--- a/venv/lib/python3.11/site-packages/pygments/lexers/sql.py
+++ /dev/null
@@ -1,1027 +0,0 @@
-"""
-    pygments.lexers.sql
-    ~~~~~~~~~~~~~~~~~~~
-
-    Lexers for various SQL dialects and related interactive sessions.
-
-    Postgres specific lexers:
-
-    `PostgresLexer`
-        A SQL lexer for the PostgreSQL dialect. Differences w.r.t. the SQL
-        lexer are:
-
-        - keywords and data types list parsed from the PG docs (run the
-          `_postgres_builtins` module to update them);
-        - Content of $-strings parsed using a specific lexer, e.g. the content
-          of a PL/Python function is parsed using the Python lexer;
-        - parse PG specific constructs: E-strings, $-strings, U&-strings,
-          different operators and punctuation.
-
-    `PlPgsqlLexer`
-        A lexer for the PL/pgSQL language. Adds a few specific construct on
-        top of the PG SQL lexer (such as <

' : '\U0001d4ab', - '\\' : '\U0001d4ac', - '\\' : '\U0000211b', - '\\' : '\U0001d4ae', - '\\' : '\U0001d4af', - '\\' : '\U0001d4b0', - '\\' : '\U0001d4b1', - '\\' : '\U0001d4b2', - '\\' : '\U0001d4b3', - '\\' : '\U0001d4b4', - '\\' : '\U0001d4b5', - '\\' : '\U0001d5ba', - '\\' : '\U0001d5bb', - '\\' : '\U0001d5bc', - '\\' : '\U0001d5bd', - '\\' : '\U0001d5be', - '\\' : '\U0001d5bf', - '\\' : '\U0001d5c0', - '\\' : '\U0001d5c1', - '\\' : '\U0001d5c2', - '\\' : '\U0001d5c3', - '\\' : '\U0001d5c4', - '\\' : '\U0001d5c5', - '\\' : '\U0001d5c6', - '\\' : '\U0001d5c7', - '\\' : '\U0001d5c8', - '\\